Sep 30 09:49:14 crc systemd[1]: Starting Kubernetes Kubelet...
Sep 30 09:49:14 crc restorecon[4729]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as customized by admin to system_u:object_r:container_file_t:s0:c442,c857
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Sep 30 09:49:14 crc restorecon[4729]:
/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:14 crc restorecon[4729]: 
/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 30 09:49:14 crc 
restorecon[4729]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Sep 30 09:49:14 crc restorecon[4729]: 
/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Sep 30 09:49:14 crc restorecon[4729]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Sep 30 09:49:14 crc 
restorecon[4729]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c129,c158 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c97,c980 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:14 crc restorecon[4729]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:14 crc restorecon[4729]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:14 crc restorecon[4729]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:14 crc restorecon[4729]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:14 crc restorecon[4729]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to system_u:object_r:container_file_t:s0:c377,c642
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset
as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 09:49:14 crc restorecon[4729]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 09:49:15 crc restorecon[4729]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c5,c6 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to
system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 
09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 09:49:15 crc 
restorecon[4729]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 30 09:49:15 crc restorecon[4729]: 
/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917 Sep 30 09:49:15 crc restorecon[4729]: 
/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Sep 30 09:49:15 crc restorecon[4729]: 
/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c37,c572 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 30 09:49:15 crc restorecon[4729]: 
/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 
09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]:
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c133,c223 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 30 09:49:15 crc restorecon[4729]: 
/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c682,c947 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 30 09:49:15 crc restorecon[4729]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 30 09:49:15 crc restorecon[4729]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0 Sep 30 09:49:16 crc kubenswrapper[4730]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Sep 30 09:49:16 crc kubenswrapper[4730]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version. Sep 30 09:49:16 crc kubenswrapper[4730]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Sep 30 09:49:16 crc kubenswrapper[4730]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. 
Sep 30 09:49:16 crc kubenswrapper[4730]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI.
Sep 30 09:49:16 crc kubenswrapper[4730]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.113687    4730 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime"
Sep 30 09:49:16 crc kubenswrapper[4730]: W0930 09:49:16.121212    4730 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Sep 30 09:49:16 crc kubenswrapper[4730]: W0930 09:49:16.121256    4730 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Sep 30 09:49:16 crc kubenswrapper[4730]: W0930 09:49:16.121263    4730 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Sep 30 09:49:16 crc kubenswrapper[4730]: W0930 09:49:16.121269    4730 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Sep 30 09:49:16 crc kubenswrapper[4730]: W0930 09:49:16.121274    4730 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Sep 30 09:49:16 crc kubenswrapper[4730]: W0930 09:49:16.121280    4730 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Sep 30 09:49:16 crc kubenswrapper[4730]: W0930 09:49:16.121286    4730 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Sep 30 09:49:16 crc kubenswrapper[4730]: W0930 09:49:16.121292    4730 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Sep 30 09:49:16 crc kubenswrapper[4730]: W0930 09:49:16.121298    4730 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Sep 30 09:49:16 crc kubenswrapper[4730]: W0930 09:49:16.121305    4730 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Sep 30 09:49:16 crc kubenswrapper[4730]: W0930 09:49:16.121311    4730 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Sep 30 09:49:16 crc kubenswrapper[4730]: W0930 09:49:16.121317    4730 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Sep 30 09:49:16 crc kubenswrapper[4730]: W0930 09:49:16.121322    4730 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Sep 30 09:49:16 crc kubenswrapper[4730]: W0930 09:49:16.121328    4730 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Sep 30 09:49:16 crc kubenswrapper[4730]: W0930 09:49:16.121333    4730 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Sep 30 09:49:16 crc kubenswrapper[4730]: W0930 09:49:16.121338    4730 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Sep 30 09:49:16 crc kubenswrapper[4730]: W0930 09:49:16.121344    4730 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Sep 30 09:49:16 crc kubenswrapper[4730]: W0930 09:49:16.121349    4730 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Sep 30 09:49:16 crc kubenswrapper[4730]: W0930 09:49:16.121354    4730 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Sep 30 09:49:16 crc kubenswrapper[4730]: W0930 09:49:16.121359    4730 feature_gate.go:330] unrecognized feature gate: PinnedImages
Sep 30 09:49:16 crc kubenswrapper[4730]: W0930 09:49:16.121364    4730 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Sep 30 09:49:16 crc kubenswrapper[4730]: W0930 09:49:16.121370    4730 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Sep 30 09:49:16 crc kubenswrapper[4730]: W0930 09:49:16.121375    4730 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Sep 30 09:49:16 crc kubenswrapper[4730]: W0930 09:49:16.121380    4730 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Sep 30 09:49:16 crc kubenswrapper[4730]: W0930 09:49:16.121385    4730 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Sep 30 09:49:16 crc kubenswrapper[4730]: W0930 09:49:16.121391    4730 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Sep 30 09:49:16 crc kubenswrapper[4730]: W0930 09:49:16.121396    4730 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Sep 30 09:49:16 crc kubenswrapper[4730]: W0930 09:49:16.121418    4730 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Sep 30 09:49:16 crc kubenswrapper[4730]: W0930 09:49:16.121424    4730 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Sep 30 09:49:16 crc kubenswrapper[4730]: W0930 09:49:16.121429    4730 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Sep 30 09:49:16 crc kubenswrapper[4730]: W0930 09:49:16.121434    4730 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Sep 30 09:49:16 crc kubenswrapper[4730]: W0930 09:49:16.121440    4730 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Sep 30 09:49:16 crc kubenswrapper[4730]: W0930 09:49:16.121445    4730 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Sep 30 09:49:16 crc kubenswrapper[4730]: W0930 09:49:16.121451    4730 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Sep 30 09:49:16 crc kubenswrapper[4730]: W0930 09:49:16.121456    4730 feature_gate.go:330] unrecognized feature gate: SignatureStores
Sep 30 09:49:16 crc kubenswrapper[4730]: W0930 09:49:16.121462    4730 feature_gate.go:330] unrecognized feature gate: OVNObservability
Sep 30 09:49:16 crc kubenswrapper[4730]: W0930 09:49:16.121467    4730 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Sep 30 09:49:16 crc kubenswrapper[4730]: W0930 09:49:16.121472    4730 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Sep 30 09:49:16 crc kubenswrapper[4730]: W0930 09:49:16.121477    4730 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Sep 30 09:49:16 crc kubenswrapper[4730]: W0930 09:49:16.121483    4730 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Sep 30 09:49:16 crc kubenswrapper[4730]: W0930 09:49:16.121491    4730 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
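The two deprecation notices at the top of this boot sequence point at the same fix: both settings belong in the kubelet config file named by --config (shown later in this log as /etc/kubernetes/kubelet.conf), not on the command line. A minimal, hypothetical sketch of that migration for --system-reserved, whose value cpu=200m,ephemeral-storage=350Mi,memory=350Mi appears in the flag dump below; field names follow the kubelet.config.k8s.io/v1beta1 KubeletConfiguration schema, and JSON output is acceptable because JSON is a subset of YAML:

    import json

    # Hypothetical translation of
    # --system-reserved=cpu=200m,ephemeral-storage=350Mi,memory=350Mi
    # into the config-file form the deprecation notice asks for.
    kubelet_config = {
        "kind": "KubeletConfiguration",
        "apiVersion": "kubelet.config.k8s.io/v1beta1",
        "systemReserved": {
            "cpu": "200m",
            "ephemeral-storage": "350Mi",
            "memory": "350Mi",
        },
    }

    # This output could be merged into the file named by --config
    # (/etc/kubernetes/kubelet.conf in this log).
    print(json.dumps(kubelet_config, indent=2))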
Sep 30 09:49:16 crc kubenswrapper[4730]: W0930 09:49:16.121499    4730 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Sep 30 09:49:16 crc kubenswrapper[4730]: W0930 09:49:16.121507    4730 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Sep 30 09:49:16 crc kubenswrapper[4730]: W0930 09:49:16.121513    4730 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Sep 30 09:49:16 crc kubenswrapper[4730]: W0930 09:49:16.121518    4730 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Sep 30 09:49:16 crc kubenswrapper[4730]: W0930 09:49:16.121525    4730 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Sep 30 09:49:16 crc kubenswrapper[4730]: W0930 09:49:16.121531    4730 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Sep 30 09:49:16 crc kubenswrapper[4730]: W0930 09:49:16.121537    4730 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Sep 30 09:49:16 crc kubenswrapper[4730]: W0930 09:49:16.121544    4730 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Sep 30 09:49:16 crc kubenswrapper[4730]: W0930 09:49:16.121550    4730 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Sep 30 09:49:16 crc kubenswrapper[4730]: W0930 09:49:16.121555    4730 feature_gate.go:330] unrecognized feature gate: Example
Sep 30 09:49:16 crc kubenswrapper[4730]: W0930 09:49:16.121561    4730 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Sep 30 09:49:16 crc kubenswrapper[4730]: W0930 09:49:16.121566    4730 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Sep 30 09:49:16 crc kubenswrapper[4730]: W0930 09:49:16.121573    4730 feature_gate.go:330] unrecognized feature gate: NewOLM
Sep 30 09:49:16 crc kubenswrapper[4730]: W0930 09:49:16.121579    4730 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Sep 30 09:49:16 crc kubenswrapper[4730]: W0930 09:49:16.121585    4730 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Sep 30 09:49:16 crc kubenswrapper[4730]: W0930 09:49:16.121591    4730 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Sep 30 09:49:16 crc kubenswrapper[4730]: W0930 09:49:16.121597    4730 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Sep 30 09:49:16 crc kubenswrapper[4730]: W0930 09:49:16.121603    4730 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Sep 30 09:49:16 crc kubenswrapper[4730]: W0930 09:49:16.121630    4730 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Sep 30 09:49:16 crc kubenswrapper[4730]: W0930 09:49:16.121636    4730 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Sep 30 09:49:16 crc kubenswrapper[4730]: W0930 09:49:16.121641    4730 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Sep 30 09:49:16 crc kubenswrapper[4730]: W0930 09:49:16.121647    4730 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Sep 30 09:49:16 crc kubenswrapper[4730]: W0930 09:49:16.121654    4730 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
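The feature_gate.go:330 warnings above are expected on OpenShift: the cluster hands its own gate names (GatewayAPI, NewOLM, PinnedImages, and so on) to a kubelet that only registers upstream Kubernetes gates, so every OpenShift-only name is reported as unrecognized, while recognized GA or deprecated gates get the feature_gate.go:351/353 notices instead. A rough Python sketch of that dispatch; the real implementation is Go, in k8s.io/component-base, and the gate names and stages here are an illustrative subset only:

    # Rough sketch of the gate-setting behavior seen in this log.
    KNOWN_GATES = {
        "ValidatingAdmissionPolicy": "GA",
        "DisableKubeletCloudCredentialProviders": "GA",
        "CloudDualStackNodeIPs": "GA",
        "KMSv1": "deprecated",
        "NodeSwap": "beta",
    }

    def set_gates(requested):
        effective = {}
        for name, enabled in requested.items():
            stage = KNOWN_GATES.get(name)
            if stage is None:
                # Unknown name: warn and skip, as feature_gate.go:330 does.
                print(f"W ... unrecognized feature gate: {name}")
                continue
            if stage in ("GA", "deprecated") and enabled:
                # GA/deprecated gates still apply, but with a removal notice.
                print(f"W ... Setting {stage} feature gate {name}=true. "
                      "It will be removed in a future release.")
            effective[name] = enabled
        return effective

    set_gates({"GatewayAPI": True, "KMSv1": True,
               "ValidatingAdmissionPolicy": True})
    # -> warns about GatewayAPI; notes KMSv1 and ValidatingAdmissionPolicy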
Sep 30 09:49:16 crc kubenswrapper[4730]: W0930 09:49:16.121660    4730 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Sep 30 09:49:16 crc kubenswrapper[4730]: W0930 09:49:16.121667    4730 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Sep 30 09:49:16 crc kubenswrapper[4730]: W0930 09:49:16.121673    4730 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Sep 30 09:49:16 crc kubenswrapper[4730]: W0930 09:49:16.121682    4730 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Sep 30 09:49:16 crc kubenswrapper[4730]: W0930 09:49:16.121689    4730 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Sep 30 09:49:16 crc kubenswrapper[4730]: W0930 09:49:16.121695    4730 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Sep 30 09:49:16 crc kubenswrapper[4730]: W0930 09:49:16.121701    4730 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124334    4730 flags.go:64] FLAG: --address="0.0.0.0"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124368    4730 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124380    4730 flags.go:64] FLAG: --anonymous-auth="true"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124387    4730 flags.go:64] FLAG: --application-metrics-count-limit="100"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124395    4730 flags.go:64] FLAG: --authentication-token-webhook="false"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124401    4730 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124407    4730 flags.go:64] FLAG: --authorization-mode="AlwaysAllow"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124413    4730 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124418    4730 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124422    4730 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124428    4730 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124433    4730 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124437    4730 flags.go:64] FLAG: --cgroup-driver="cgroupfs"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124442    4730 flags.go:64] FLAG: --cgroup-root=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124446    4730 flags.go:64] FLAG: --cgroups-per-qos="true"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124450    4730 flags.go:64] FLAG: --client-ca-file=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124455    4730 flags.go:64] FLAG: --cloud-config=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124459    4730 flags.go:64] FLAG: --cloud-provider=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124464    4730 flags.go:64] FLAG: --cluster-dns="[]"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124469    4730 flags.go:64] FLAG: --cluster-domain=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124473    4730 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124478    4730 flags.go:64] FLAG: --config-dir=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124482    4730 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124487    4730 flags.go:64] FLAG: --container-log-max-files="5"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124493    4730 flags.go:64] FLAG: --container-log-max-size="10Mi"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124498    4730 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124504    4730 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124510    4730 flags.go:64] FLAG: --containerd-namespace="k8s.io"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124515    4730 flags.go:64] FLAG: --contention-profiling="false"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124520    4730 flags.go:64] FLAG: --cpu-cfs-quota="true"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124525    4730 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124531    4730 flags.go:64] FLAG: --cpu-manager-policy="none"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124536    4730 flags.go:64] FLAG: --cpu-manager-policy-options=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124543    4730 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124548    4730 flags.go:64] FLAG: --enable-controller-attach-detach="true"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124552    4730 flags.go:64] FLAG: --enable-debugging-handlers="true"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124556    4730 flags.go:64] FLAG: --enable-load-reader="false"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124561    4730 flags.go:64] FLAG: --enable-server="true"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124565    4730 flags.go:64] FLAG: --enforce-node-allocatable="[pods]"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124571    4730 flags.go:64] FLAG: --event-burst="100"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124577    4730 flags.go:64] FLAG: --event-qps="50"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124581    4730 flags.go:64] FLAG: --event-storage-age-limit="default=0"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124585    4730 flags.go:64] FLAG: --event-storage-event-limit="default=0"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124590    4730 flags.go:64] FLAG: --eviction-hard=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124595    4730 flags.go:64] FLAG: --eviction-max-pod-grace-period="0"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124600    4730 flags.go:64] FLAG: --eviction-minimum-reclaim=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124604    4730 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124630    4730 flags.go:64] FLAG: --eviction-soft=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124635    4730 flags.go:64] FLAG: --eviction-soft-grace-period=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124639    4730 flags.go:64] FLAG: --exit-on-lock-contention="false"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124643    4730 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124648    4730 flags.go:64] FLAG: --experimental-mounter-path=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124652    4730 flags.go:64] FLAG: --fail-cgroupv1="false"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124656    4730 flags.go:64] FLAG: --fail-swap-on="true"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124661    4730 flags.go:64] FLAG: --feature-gates=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124666    4730 flags.go:64] FLAG: --file-check-frequency="20s"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124670    4730 flags.go:64] FLAG: --global-housekeeping-interval="1m0s"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124675    4730 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124680    4730 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124684    4730 flags.go:64] FLAG: --healthz-port="10248"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124690    4730 flags.go:64] FLAG: --help="false"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124695    4730 flags.go:64] FLAG: --hostname-override=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124699    4730 flags.go:64] FLAG: --housekeeping-interval="10s"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124703    4730 flags.go:64] FLAG: --http-check-frequency="20s"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124708    4730 flags.go:64] FLAG: --image-credential-provider-bin-dir=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124712    4730 flags.go:64] FLAG: --image-credential-provider-config=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124716    4730 flags.go:64] FLAG: --image-gc-high-threshold="85"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124721    4730 flags.go:64] FLAG: --image-gc-low-threshold="80"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124725    4730 flags.go:64] FLAG: --image-service-endpoint=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124730    4730 flags.go:64] FLAG: --kernel-memcg-notification="false"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124734    4730 flags.go:64] FLAG: --kube-api-burst="100"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124738    4730 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124743    4730 flags.go:64] FLAG: --kube-api-qps="50"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124747    4730 flags.go:64] FLAG: --kube-reserved=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124753    4730 flags.go:64] FLAG: --kube-reserved-cgroup=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124757    4730 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124763    4730 flags.go:64] FLAG: --kubelet-cgroups=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124767    4730 flags.go:64] FLAG: --local-storage-capacity-isolation="true"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124771    4730 flags.go:64] FLAG: --lock-file=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124775    4730 flags.go:64] FLAG: --log-cadvisor-usage="false"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124780    4730 flags.go:64] FLAG: --log-flush-frequency="5s"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124784    4730 flags.go:64] FLAG: --log-json-info-buffer-size="0"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124791    4730 flags.go:64] FLAG: --log-json-split-stream="false"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124795    4730 flags.go:64] FLAG: --log-text-info-buffer-size="0"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124799    4730 flags.go:64] FLAG: --log-text-split-stream="false"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124803    4730 flags.go:64] FLAG: --logging-format="text"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124807    4730 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124812    4730 flags.go:64] FLAG: --make-iptables-util-chains="true"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124816    4730 flags.go:64] FLAG: --manifest-url=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124820    4730 flags.go:64] FLAG: --manifest-url-header=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124827    4730 flags.go:64] FLAG: --max-housekeeping-interval="15s"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124836    4730 flags.go:64] FLAG: --max-open-files="1000000"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124843    4730 flags.go:64] FLAG: --max-pods="110"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124848    4730 flags.go:64] FLAG: --maximum-dead-containers="-1"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124852    4730 flags.go:64] FLAG: --maximum-dead-containers-per-container="1"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124857    4730 flags.go:64] FLAG: --memory-manager-policy="None"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124861    4730 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124865    4730 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124869    4730 flags.go:64] FLAG: --node-ip="192.168.126.11"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124874    4730 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124887    4730 flags.go:64] FLAG: --node-status-max-images="50"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124891    4730 flags.go:64] FLAG: --node-status-update-frequency="10s"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124896    4730 flags.go:64] FLAG: --oom-score-adj="-999"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124900    4730 flags.go:64] FLAG: --pod-cidr=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124905    4730 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124914    4730 flags.go:64] FLAG: --pod-manifest-path=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124918    4730 flags.go:64] FLAG: --pod-max-pids="-1"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124923    4730 flags.go:64] FLAG: --pods-per-core="0"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124927    4730 flags.go:64] FLAG: --port="10250"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124932    4730 flags.go:64] FLAG: --protect-kernel-defaults="false"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124936    4730 flags.go:64] FLAG: --provider-id=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124940    4730 flags.go:64] FLAG: --qos-reserved=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124945    4730 flags.go:64] FLAG: --read-only-port="10255"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124950    4730 flags.go:64] FLAG: --register-node="true"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124954    4730 flags.go:64] FLAG: --register-schedulable="true"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124959    4730 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124967    4730 flags.go:64] FLAG: --registry-burst="10"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124972    4730 flags.go:64] FLAG: --registry-qps="5"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124976    4730 flags.go:64] FLAG: --reserved-cpus=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124980    4730 flags.go:64] FLAG: --reserved-memory=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124986    4730 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124990    4730 flags.go:64] FLAG: --root-dir="/var/lib/kubelet"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124995    4730 flags.go:64] FLAG: --rotate-certificates="false"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.124999    4730 flags.go:64] FLAG: --rotate-server-certificates="false"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.125003    4730 flags.go:64] FLAG: --runonce="false"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.125010    4730 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.125014    4730 flags.go:64] FLAG: --runtime-request-timeout="2m0s"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.125019    4730 flags.go:64] FLAG: --seccomp-default="false"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.125023    4730 flags.go:64] FLAG: --serialize-image-pulls="true"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.125027    4730 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.125032    4730 flags.go:64] FLAG: --storage-driver-db="cadvisor"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.125036    4730 flags.go:64] FLAG: --storage-driver-host="localhost:8086"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.125041    4730 flags.go:64] FLAG: --storage-driver-password="root"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.125045    4730 flags.go:64] FLAG: --storage-driver-secure="false"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.125049    4730 flags.go:64] FLAG: --storage-driver-table="stats"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.125054    4730 flags.go:64] FLAG: --storage-driver-user="root"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.125058    4730 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.125062    4730 flags.go:64] FLAG: --sync-frequency="1m0s"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.125067    4730 flags.go:64] FLAG: --system-cgroups=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.125071    4730 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.125078    4730 flags.go:64] FLAG: --system-reserved-cgroup=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.125082    4730 flags.go:64] FLAG: --tls-cert-file=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.125086    4730 flags.go:64] FLAG: --tls-cipher-suites="[]"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.125091    4730 flags.go:64] FLAG: --tls-min-version=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.125095    4730 flags.go:64] FLAG: --tls-private-key-file=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.125100    4730 flags.go:64] FLAG: --topology-manager-policy="none"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.125104    4730 flags.go:64] FLAG: --topology-manager-policy-options=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.125108    4730 flags.go:64] FLAG: --topology-manager-scope="container"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.125114    4730 flags.go:64] FLAG: --v="2"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.125120    4730 flags.go:64] FLAG: --version="false"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.125126    4730 flags.go:64] FLAG: --vmodule=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.125132    4730 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.125136    4730 flags.go:64] FLAG: --volume-stats-agg-period="1m0s"
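Every flags.go:64 entry above has the same shape, FLAG: --name="value", which makes the dump easy to turn back into a lookup table when auditing a node's effective command line. A small sketch, assuming journal lines supplied as plain strings:

    import re

    # Each flags.go:64 entry ends with: FLAG: --name="value".
    FLAG_RE = re.compile(r'FLAG: --([\w-]+)="(.*)"$')

    def parse_flags(lines):
        flags = {}
        for line in lines:
            m = FLAG_RE.search(line)
            if m:
                # Values stay as the quoted strings the kubelet printed.
                flags[m.group(1)] = m.group(2)
        return flags

    sample = [
        'I0930 09:49:16.124869    4730 flags.go:64] FLAG: --node-ip="192.168.126.11"',
        'I0930 09:49:16.125114    4730 flags.go:64] FLAG: --v="2"',
    ]
    print(parse_flags(sample))  # {'node-ip': '192.168.126.11', 'v': '2'}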
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.125586    4730 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.135117    4730 server.go:491] "Kubelet version" kubeletVersion="v1.31.5"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.135178    4730 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
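The feature_gate.go:386 line above prints the effective gates as a Go map literal, {map[Name:bool ...]}. A short sketch that converts that form into a Python dict, e.g. for diffing the effective gate set across kubelet restarts (it assumes boolean-only values, as in this log):

    import re

    def parse_go_gate_map(text):
        # Pull out the "Name:true Name2:false ..." body of the Go map literal.
        inner = re.search(r"map\[(.*)\]", text).group(1)
        return {k: v == "true" for k, v in
                (pair.split(":") for pair in inner.split())}

    line = "feature gates: {map[CloudDualStackNodeIPs:true KMSv1:true NodeSwap:false]}"
    gates = parse_go_gate_map(line)
    print(gates["KMSv1"], gates["NodeSwap"])  # True False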
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.137688    4730 server.go:940] "Client rotation is on, will bootstrap in background"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.142222    4730 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.142341    4730 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem".
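The certificate_manager.go:356 lines just below show the rotation arithmetic: the "Waiting 1904h32m57...s" figure is simply the rotation deadline minus the time the line was logged. A quick check with timestamps copied from the log (sub-second parts rounded here, so the final digits differ slightly):

    from datetime import datetime, timezone

    # Time the "Waiting ..." line was emitted, and the logged rotation deadline.
    logged_at = datetime(2025, 9, 30, 9, 49, 16, 144375, tzinfo=timezone.utc)
    deadline = datetime(2025, 12, 18, 18, 22, 14, 14994, tzinfo=timezone.utc)

    wait = deadline - logged_at
    hours, rem = divmod(wait.total_seconds(), 3600)
    minutes, seconds = divmod(rem, 60)
    # Matches the log's 1904h32m57.870620531s up to the rounded microseconds.
    print(f"{int(hours)}h{int(minutes)}m{seconds:.6f}s")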
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.144113 4730 server.go:997] "Starting client certificate rotation"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.144144 4730 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.144317 4730 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2025-12-18 18:22:14.014993755 +0000 UTC
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.144375 4730 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 1904h32m57.870620531s for next certificate rotation
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.182401 4730 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.184586 4730 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.205765 4730 log.go:25] "Validated CRI v1 runtime API"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.246197 4730 log.go:25] "Validated CRI v1 image API"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.248708 4730 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.255348 4730 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2025-09-30-09-44-00-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3]
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.255433 4730 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:43 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:42 fsType:tmpfs blockSize:0}]
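
The two certificate_manager.go lines above explain the ~1904h wait: the client certificate expires 2026-02-24, and the manager schedules renewal at a randomized point late in the certificate's validity window so a fleet of kubelets does not renew simultaneously. A sketch of that jittered-deadline computation; the 70-90% window reflects client-go's behaviour as I understand it, and the issue time below is an assumption (only the expiry comes from this log):

package main

import (
	"fmt"
	"math/rand"
	"time"
)

// nextRotationDeadline picks a random point in roughly the 70-90% span of
// the certificate's lifetime, in the style of client-go's certificate manager.
func nextRotationDeadline(notBefore, notAfter time.Time) time.Time {
	lifetime := notAfter.Sub(notBefore)
	jitter := 0.7 + 0.2*rand.Float64()
	return notBefore.Add(time.Duration(float64(lifetime) * jitter))
}

func main() {
	// Expiry taken from the log line above; issue time assumed one year earlier.
	notAfter := time.Date(2026, 2, 24, 5, 52, 8, 0, time.UTC)
	notBefore := notAfter.AddDate(-1, 0, 0)
	deadline := nextRotationDeadline(notBefore, notAfter)
	fmt.Printf("rotation deadline %s, waiting %s\n", deadline, time.Until(deadline).Round(time.Second))
}

The logged deadline, 2025-12-18, sits at about 82% of the assumed one-year lifetime, consistent with that window.
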
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.283505 4730 manager.go:217] Machine: {Timestamp:2025-09-30 09:49:16.272758983 +0000 UTC m=+0.606019006 CPUVendorID:AuthenticAMD NumCores:12 NumPhysicalCores:1 NumSockets:12 CpuFrequency:2799998 MemoryCapacity:33654132736 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:07b44d08-082f-49ea-b265-a8fb7a484875 BootID:106859cf-ef10-430a-91cd-145c67df2de1 Filesystems:[{Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:16827068416 Type:vfs Inodes:1048576 HasInodes:true} {Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true} {Device:/run/user/1000 DeviceMajor:0 DeviceMinor:43 Capacity:3365412864 Type:vfs Inodes:821634 HasInodes:true} {Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:42 Capacity:1073741824 Type:vfs Inodes:4108170 HasInodes:true} {Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:16827064320 Type:vfs Inodes:4108170 HasInodes:true} {Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:6730829824 Type:vfs Inodes:819200 HasInodes:true} {Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:214748364800 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:3a:b4:5d Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:3a:b4:5d Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:96:85:46 Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:b2:a3:bb Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:76:bf:27 Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:e6:92:1e Speed:-1 Mtu:1496} {Name:ens7.23 MacAddress:52:54:00:41:53:a4 Speed:-1 Mtu:1496} {Name:eth10 MacAddress:16:b5:34:c9:60:e5 Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:22:d8:02:27:4e:09 Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:33654132736 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[10] Caches:[{Id:10 Size:32768 Type:Data Level:1} {Id:10 Size:32768 Type:Instruction Level:1} {Id:10 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:10 Size:16777216 Type:Unified Level:3}] SocketID:10 BookID: DrawerID:} {Id:0 Threads:[11] Caches:[{Id:11 Size:32768 Type:Data Level:1} {Id:11 Size:32768 Type:Instruction Level:1} {Id:11 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:11 Size:16777216 Type:Unified Level:3}] SocketID:11 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 Type:Data Level:1} {Id:2 Size:32768 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:32768 Type:Data Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 Size:32768 Type:Data Level:1} {Id:7 Size:32768 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:} {Id:0 Threads:[8] Caches:[{Id:8 Size:32768 Type:Data Level:1} {Id:8 Size:32768 Type:Instruction Level:1} {Id:8 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:8 Size:16777216 Type:Unified Level:3}] SocketID:8 BookID: DrawerID:} {Id:0 Threads:[9] Caches:[{Id:9 Size:32768 Type:Data Level:1} {Id:9 Size:32768 Type:Instruction Level:1} {Id:9 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:9 Size:16777216 Type:Unified Level:3}] SocketID:9 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None}
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.283964 4730 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available.
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.284201 4730 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:}
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.285592 4730 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.285939 4730 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[]
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.285983 4730 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2}
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.286335 4730 topology_manager.go:138] "Creating topology manager with none policy"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.286350 4730 container_manager_linux.go:303] "Creating device plugin manager"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.287069 4730 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.287117 4730 server.go:66] "Creating device plugin registration server" version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock"
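
The nodeConfig blob above is the container manager's configuration in struct form; the HardEvictionThresholds list, for instance, is what an --eviction-hard style setting such as memory.available<100Mi,nodefs.available<10% parses into, with percentages stored as fractions (10% -> 0.1) and resource quantities kept verbatim. A small sketch of that parse under those assumptions (a hypothetical helper, not kubelet code):

package main

import (
	"fmt"
	"strconv"
	"strings"
)

// threshold mirrors the {Signal, Operator, Value{Quantity|Percentage}} shape in the log.
type threshold struct {
	Signal     string
	Operator   string
	Quantity   string  // e.g. "100Mi"; empty when a percentage is used
	Percentage float64 // e.g. 0.1 for "10%"
}

// parseHard turns one "signal<value" spec into a threshold.
func parseHard(spec string) (threshold, error) {
	sig, val, ok := strings.Cut(spec, "<")
	if !ok {
		return threshold{}, fmt.Errorf("want signal<value, got %q", spec)
	}
	t := threshold{Signal: sig, Operator: "LessThan"}
	if pct, found := strings.CutSuffix(val, "%"); found {
		f, err := strconv.ParseFloat(pct, 64)
		if err != nil {
			return t, err
		}
		t.Percentage = f / 100
		return t, nil
	}
	t.Quantity = val
	return t, nil
}

func main() {
	for _, s := range []string{"memory.available<100Mi", "nodefs.available<10%"} {
		t, err := parseHard(s)
		if err != nil {
			panic(err)
		}
		fmt.Printf("%+v\n", t)
	}
}
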
version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.287287 4730 state_mem.go:36] "Initialized new in-memory state store" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.287403 4730 server.go:1245] "Using root directory" path="/var/lib/kubelet" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.291753 4730 kubelet.go:418] "Attempting to sync node with API server" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.291813 4730 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.291867 4730 file.go:69] "Watching path" path="/etc/kubernetes/manifests" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.291895 4730 kubelet.go:324] "Adding apiserver pod source" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.291920 4730 apiserver.go:42] "Waiting for node sync before watching apiserver pods" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.297772 4730 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.300665 4730 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem". Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.308410 4730 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode" Sep 30 09:49:16 crc kubenswrapper[4730]: W0930 09:49:16.308368 4730 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.104:6443: connect: connection refused Sep 30 09:49:16 crc kubenswrapper[4730]: W0930 09:49:16.308413 4730 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.104:6443: connect: connection refused Sep 30 09:49:16 crc kubenswrapper[4730]: E0930 09:49:16.308760 4730 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.104:6443: connect: connection refused" logger="UnhandledError" Sep 30 09:49:16 crc kubenswrapper[4730]: E0930 09:49:16.308769 4730 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.104:6443: connect: connection refused" logger="UnhandledError" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.310843 4730 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.310898 4730 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.310909 4730 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.310919 4730 plugins.go:603] "Loaded volume plugin" 
pluginName="kubernetes.io/host-path" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.310935 4730 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.310946 4730 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.310958 4730 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.310973 4730 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.310986 4730 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.310998 4730 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.311022 4730 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.311032 4730 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.312214 4730 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.312935 4730 server.go:1280] "Started kubelet" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.313932 4730 server.go:163] "Starting to listen" address="0.0.0.0" port=10250 Sep 30 09:49:16 crc systemd[1]: Started Kubernetes Kubelet. Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.315831 4730 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10 Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.316597 4730 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.316675 4730 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.104:6443: connect: connection refused Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.317313 4730 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.317397 4730 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.317424 4730 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-09 16:13:22.238589322 +0000 UTC Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.317482 4730 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 966h24m5.921111688s for next certificate rotation Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.317553 4730 volume_manager.go:287] "The desired_state_of_world populator starts" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.317573 4730 volume_manager.go:289] "Starting Kubelet Volume Manager" Sep 30 09:49:16 crc kubenswrapper[4730]: E0930 09:49:16.317645 4730 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.317820 4730 desired_state_of_world_populator.go:146] "Desired state populator starts 
Sep 30 09:49:16 crc kubenswrapper[4730]: W0930 09:49:16.318455 4730 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.104:6443: connect: connection refused
Sep 30 09:49:16 crc kubenswrapper[4730]: E0930 09:49:16.318514 4730 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.104:6443: connect: connection refused" logger="UnhandledError"
Sep 30 09:49:16 crc kubenswrapper[4730]: E0930 09:49:16.319251 4730 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.104:6443: connect: connection refused" interval="200ms"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.320260 4730 factory.go:55] Registering systemd factory
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.320345 4730 factory.go:221] Registration of the systemd container factory successfully
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.320928 4730 factory.go:153] Registering CRI-O factory
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.320954 4730 factory.go:221] Registration of the crio container factory successfully
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.320990 4730 server.go:460] "Adding debug handlers to kubelet server"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.321170 4730 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.321206 4730 factory.go:103] Registering Raw factory
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.321229 4730 manager.go:1196] Started watching for new ooms in manager
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.322290 4730 manager.go:319] Starting recovery of all containers
Sep 30 09:49:16 crc kubenswrapper[4730]: E0930 09:49:16.321751 4730 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.104:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.186a06812cbf67b4 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-09-30 09:49:16.312889268 +0000 UTC m=+0.646149281,LastTimestamp:2025-09-30 09:49:16.312889268 +0000 UTC m=+0.646149281,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.329156 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext=""
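
The reconstruct.go:130 entry just above opens a long run that continues below: the API server is still unreachable (see the connection-refused errors earlier), so the volume manager rebuilds its actual state of world purely from disk, walking /var/lib/kubelet/pods/<podUID>/volumes/<plugin>/<volume> and marking every mount it finds as "uncertain" until it can be reconciled against the desired state. A simplified sketch of that walk, assuming only the on-disk layout visible in this log (plugin directories such as kubernetes.io~configmap); the real reconstruction additionally verifies mount points, devices, and SELinux contexts:

package main

import (
	"fmt"
	"os"
	"path/filepath"
	"strings"
)

// reconstruct walks the kubelet's pods directory and prints one line per
// volume directory it finds, in the shape of the reconstruct.go entries below.
func reconstruct(root string) error {
	pods, err := os.ReadDir(filepath.Join(root, "pods"))
	if err != nil {
		return err
	}
	for _, pod := range pods {
		volRoot := filepath.Join(root, "pods", pod.Name(), "volumes")
		plugins, err := os.ReadDir(volRoot)
		if err != nil {
			continue // pod without volumes
		}
		for _, plugin := range plugins {
			vols, err := os.ReadDir(filepath.Join(volRoot, plugin.Name()))
			if err != nil {
				continue
			}
			for _, vol := range vols {
				// On disk "kubernetes.io~configmap"; logged as "kubernetes.io/configmap/<podUID>-<name>".
				pluginName := strings.ReplaceAll(plugin.Name(), "~", "/")
				fmt.Printf("volume marked uncertain: podName=%q volumeName=%q\n",
					pod.Name(), pluginName+"/"+pod.Name()+"-"+vol.Name())
			}
		}
	}
	return nil
}

func main() {
	if err := reconstruct("/var/lib/kubelet"); err != nil {
		fmt.Fprintln(os.Stderr, err)
	}
}
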
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.329248 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.329276 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.329292 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.329306 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.329325 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.329338 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.329351 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.329372 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.329387 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.329405 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.329422 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.329441 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.329458 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.329481 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.329497 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.329518 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.329685 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.329700 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.329720 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.329738 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.329754 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.329773 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.329787 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.329805 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.329818 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.329840 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.329858 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.329880 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.329893 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.329916 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.329930 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.329954 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.329973 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.335816 4730 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.336209 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.336237 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.336250 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.336263 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.336276 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.336290 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.336306 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.336319 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.336332 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.336343 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.336356 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.336368 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.336381 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.336402 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.336417 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.336429 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.336446 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.336457 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.336477 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.336491 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.336504 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.336515 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.336527 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.336541 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.336553 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.336562 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.336573 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.336584 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.336595 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.336606 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.336699 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.336711 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.336725 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.336736 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.336748 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.336760 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.336770 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.336782 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.336792 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.336804 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.336814 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.338056 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.338388 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.338425 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.338437 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.338452 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.338465 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.338479 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.338491 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.338503 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.338514 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.338525 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.338537 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext=""
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.338548 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext=""
podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.338574 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.338589 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.338607 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.338635 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.338648 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.338661 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.338673 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.338688 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.338705 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.338719 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.338730 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.338742 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.338755 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.338766 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.338779 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.338799 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.338812 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.338824 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.338866 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.338879 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.338894 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.338906 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.338919 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.338931 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.338944 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.338957 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.338970 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.338982 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.338997 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.339010 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.339021 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.339033 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.339044 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.339056 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.339069 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.339081 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.339094 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.339108 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.339119 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.339130 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.339141 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.339153 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.339165 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.339177 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" 
volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.339189 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.339201 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.339212 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.339223 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.339235 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.339247 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.339261 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.339273 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.339283 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.339295 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.339305 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" 
volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.339317 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.339329 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.339341 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.339351 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.339362 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.339373 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.339383 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.339393 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.339405 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.339423 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.339434 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" 
volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.339445 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.339455 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.339466 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.339478 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.339488 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.339498 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.339508 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.339519 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.339530 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.339541 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.339552 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" 
volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.339563 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.339574 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.339646 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.339660 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.339672 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.339685 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.339696 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.339708 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.339726 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.339748 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.339765 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" 
volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.339779 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.339792 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.339804 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.339816 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.339828 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.339840 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.339850 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.339862 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.339873 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.339884 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.339897 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" 
volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.339910 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.339920 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.339933 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.339957 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.339973 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.339990 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.340004 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.340018 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.340031 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.340046 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.340060 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" 
volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.340072 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.340083 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.340096 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.340106 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.340124 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.340137 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.340152 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.340164 4730 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext="" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.340177 4730 reconstruct.go:97] "Volume reconstruction finished" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.340189 4730 reconciler.go:26] "Reconciler: start to sync state" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.347379 4730 manager.go:324] Recovery completed Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.357791 4730 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.360270 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.360327 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:16 crc 
kubenswrapper[4730]: I0930 09:49:16.360340 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.361262 4730 cpu_manager.go:225] "Starting CPU manager" policy="none" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.361293 4730 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.361330 4730 state_mem.go:36] "Initialized new in-memory state store" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.377725 4730 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.379513 4730 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv6" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.379561 4730 status_manager.go:217] "Starting to sync pod status with apiserver" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.379584 4730 kubelet.go:2335] "Starting kubelet main sync loop" Sep 30 09:49:16 crc kubenswrapper[4730]: E0930 09:49:16.379691 4730 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Sep 30 09:49:16 crc kubenswrapper[4730]: W0930 09:49:16.380496 4730 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.104:6443: connect: connection refused Sep 30 09:49:16 crc kubenswrapper[4730]: E0930 09:49:16.380549 4730 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.104:6443: connect: connection refused" logger="UnhandledError" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.384211 4730 policy_none.go:49] "None policy: Start" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.385246 4730 memory_manager.go:170] "Starting memorymanager" policy="None" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.385296 4730 state_mem.go:35] "Initializing new in-memory state store" Sep 30 09:49:16 crc kubenswrapper[4730]: E0930 09:49:16.418583 4730 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.449112 4730 manager.go:334] "Starting Device Plugin manager" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.449480 4730 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.449506 4730 server.go:79] "Starting device plugin registration server" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.450080 4730 eviction_manager.go:189] "Eviction manager: starting control loop" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.450101 4730 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.450253 4730 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.450424 4730 plugin_manager.go:116] 
"The desired_state_of_world populator (plugin watcher) starts" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.450445 4730 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Sep 30 09:49:16 crc kubenswrapper[4730]: E0930 09:49:16.459190 4730 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.480231 4730 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc","openshift-etcd/etcd-crc","openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc","openshift-kube-scheduler/openshift-kube-scheduler-crc"] Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.480377 4730 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.481956 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.482009 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.482217 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.483215 4730 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.483945 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.484000 4730 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.484858 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.484938 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.484952 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.485211 4730 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.485405 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd/etcd-crc" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.485458 4730 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.487695 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.487742 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.487755 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.487807 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.487847 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.487866 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.487917 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.487951 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.487969 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.488134 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.488187 4730 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.488252 4730 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.489327 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.489364 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.489377 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.489570 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.489626 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.489644 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.489826 4730 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.489952 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.489990 4730 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.491421 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.491455 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.491471 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.491483 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.491499 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.491485 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.491731 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.491779 4730 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.492668 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.492705 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.492719 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:16 crc kubenswrapper[4730]: E0930 09:49:16.520049 4730 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.104:6443: connect: connection refused" interval="400ms" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.542247 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.542289 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.542317 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.542338 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.542415 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.542478 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.542514 4730 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.542544 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.542580 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.542661 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.542683 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.542700 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.542716 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.542732 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.542751 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.550435 4730 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 09:49:16 crc 
kubenswrapper[4730]: I0930 09:49:16.551741 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.551806 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.551820 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.551870 4730 kubelet_node_status.go:76] "Attempting to register node" node="crc" Sep 30 09:49:16 crc kubenswrapper[4730]: E0930 09:49:16.552374 4730 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.104:6443: connect: connection refused" node="crc" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.644124 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.644192 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.644226 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.644255 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.644279 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.644301 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.644326 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 30 09:49:16 crc 
kubenswrapper[4730]: I0930 09:49:16.644330 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.644380 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.644347 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.644353 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.644401 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.644438 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.644469 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.644427 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.644413 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.644560 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" 
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.644599 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.644646 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.644724 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.644730 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.644728 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.644756 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.644772 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.644831 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.644843 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.644856 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.644881 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.644945 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.644992 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.752868 4730 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.754456 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.754505 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.754514 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.754551 4730 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Sep 30 09:49:16 crc kubenswrapper[4730]: E0930 09:49:16.755270 4730 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.104:6443: connect: connection refused" node="crc"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.820074 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.829902 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.849181 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.857377 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Sep 30 09:49:16 crc kubenswrapper[4730]: I0930 09:49:16.862859 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Sep 30 09:49:16 crc kubenswrapper[4730]: W0930 09:49:16.887456 4730 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-2f857604fcc8dfcd0c52d3387bcb39febf911ae7d7cf16bb180b2ee78b443de9 WatchSource:0}: Error finding container 2f857604fcc8dfcd0c52d3387bcb39febf911ae7d7cf16bb180b2ee78b443de9: Status 404 returned error can't find the container with id 2f857604fcc8dfcd0c52d3387bcb39febf911ae7d7cf16bb180b2ee78b443de9
Sep 30 09:49:16 crc kubenswrapper[4730]: W0930 09:49:16.888917 4730 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-c9709b6ce8acf5edb0e2f21e2f58e39f40b4487f779b51dc17801184ae4e29a5 WatchSource:0}: Error finding container c9709b6ce8acf5edb0e2f21e2f58e39f40b4487f779b51dc17801184ae4e29a5: Status 404 returned error can't find the container with id c9709b6ce8acf5edb0e2f21e2f58e39f40b4487f779b51dc17801184ae4e29a5
Sep 30 09:49:16 crc kubenswrapper[4730]: W0930 09:49:16.892378 4730 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614b9022728cf315e60c057852e563e.slice/crio-edb1c00e62b7d1be16e3b24ff1b9c1c9b47892da9d0a2d52fdbedd13a3a80693 WatchSource:0}: Error finding container edb1c00e62b7d1be16e3b24ff1b9c1c9b47892da9d0a2d52fdbedd13a3a80693: Status 404 returned error can't find the container with id edb1c00e62b7d1be16e3b24ff1b9c1c9b47892da9d0a2d52fdbedd13a3a80693
Sep 30 09:49:16 crc kubenswrapper[4730]: W0930 09:49:16.894944 4730 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-a472a745a151f9971fa44e20d282f3e18ee367fcc97912c6caa273fe5d117e6c WatchSource:0}: Error finding container a472a745a151f9971fa44e20d282f3e18ee367fcc97912c6caa273fe5d117e6c: Status 404 returned error can't find the container with id a472a745a151f9971fa44e20d282f3e18ee367fcc97912c6caa273fe5d117e6c
Sep 30 09:49:16 crc kubenswrapper[4730]: W0930 09:49:16.897462 4730 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dcd261975c3d6b9a6ad6367fd4facd3.slice/crio-04ad8a0f2787e20518cfbbb0012b845afff4c9d57e4bde2f22cc131a26ee00b0 WatchSource:0}: Error finding container 04ad8a0f2787e20518cfbbb0012b845afff4c9d57e4bde2f22cc131a26ee00b0: Status 404 returned error can't find the container with id 04ad8a0f2787e20518cfbbb0012b845afff4c9d57e4bde2f22cc131a26ee00b0
Sep 30 09:49:16 crc kubenswrapper[4730]: E0930 09:49:16.921237 4730 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.104:6443: connect: connection refused" interval="800ms"
Sep 30 09:49:17 crc kubenswrapper[4730]: I0930 09:49:17.155782 4730 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 30 09:49:17 crc kubenswrapper[4730]: I0930 09:49:17.157618 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 09:49:17 crc kubenswrapper[4730]: I0930 09:49:17.157653 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 09:49:17 crc kubenswrapper[4730]: I0930 09:49:17.157663 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 09:49:17 crc kubenswrapper[4730]: I0930 09:49:17.157685 4730 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Sep 30 09:49:17 crc kubenswrapper[4730]: E0930 09:49:17.158225 4730 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.104:6443: connect: connection refused" node="crc"
Sep 30 09:49:17 crc kubenswrapper[4730]: I0930 09:49:17.317344 4730 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.104:6443: connect: connection refused
Sep 30 09:49:17 crc kubenswrapper[4730]: I0930 09:49:17.385800 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"04ad8a0f2787e20518cfbbb0012b845afff4c9d57e4bde2f22cc131a26ee00b0"}
Sep 30 09:49:17 crc kubenswrapper[4730]: I0930 09:49:17.387011 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"a472a745a151f9971fa44e20d282f3e18ee367fcc97912c6caa273fe5d117e6c"}
Sep 30 09:49:17 crc kubenswrapper[4730]: I0930 09:49:17.388422 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"edb1c00e62b7d1be16e3b24ff1b9c1c9b47892da9d0a2d52fdbedd13a3a80693"}
Sep 30 09:49:17 crc kubenswrapper[4730]: I0930 09:49:17.390170 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"c9709b6ce8acf5edb0e2f21e2f58e39f40b4487f779b51dc17801184ae4e29a5"}
Sep 30 09:49:17 crc kubenswrapper[4730]: I0930 09:49:17.391292 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"2f857604fcc8dfcd0c52d3387bcb39febf911ae7d7cf16bb180b2ee78b443de9"}
Sep 30 09:49:17 crc kubenswrapper[4730]: W0930 09:49:17.403157 4730 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.104:6443: connect: connection refused
Sep 30 09:49:17 crc kubenswrapper[4730]: E0930 09:49:17.403254 4730 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.104:6443: connect: connection refused" logger="UnhandledError"
Sep 30 09:49:17 crc kubenswrapper[4730]: W0930 09:49:17.707803 4730 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.104:6443: connect: connection refused
Sep 30 09:49:17 crc kubenswrapper[4730]: E0930 09:49:17.707915 4730 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.104:6443: connect: connection refused" logger="UnhandledError"
Sep 30 09:49:17 crc kubenswrapper[4730]: E0930 09:49:17.723012 4730 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.104:6443: connect: connection refused" interval="1.6s"
Sep 30 09:49:17 crc kubenswrapper[4730]: W0930 09:49:17.764323 4730 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.104:6443: connect: connection refused
Sep 30 09:49:17 crc kubenswrapper[4730]: E0930 09:49:17.764426 4730 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.104:6443: connect: connection refused" logger="UnhandledError"
Sep 30 09:49:17 crc kubenswrapper[4730]: W0930 09:49:17.886958 4730 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.104:6443: connect: connection refused
Sep 30 09:49:17 crc kubenswrapper[4730]: E0930 09:49:17.887118 4730 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.104:6443: connect: connection refused" logger="UnhandledError"
Sep 30 09:49:17 crc kubenswrapper[4730]: I0930 09:49:17.958593 4730 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 30 09:49:17 crc kubenswrapper[4730]: I0930 09:49:17.959853 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 09:49:17 crc kubenswrapper[4730]: I0930 09:49:17.959885 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 09:49:17 crc kubenswrapper[4730]: I0930 09:49:17.959895 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 09:49:17 crc kubenswrapper[4730]: I0930 09:49:17.959925 4730 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Sep 30 09:49:17 crc kubenswrapper[4730]: E0930 09:49:17.960516 4730 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.104:6443: connect: connection refused" node="crc"
Sep 30 09:49:18 crc kubenswrapper[4730]: I0930 09:49:18.317946 4730 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.104:6443: connect: connection refused
"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.104:6443: connect: connection refused Sep 30 09:49:18 crc kubenswrapper[4730]: I0930 09:49:18.396812 4730 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="a1bb754504953177858ef8742f7c3889393c8478b60edfaed19f1944bf2b6e0f" exitCode=0 Sep 30 09:49:18 crc kubenswrapper[4730]: I0930 09:49:18.396927 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"a1bb754504953177858ef8742f7c3889393c8478b60edfaed19f1944bf2b6e0f"} Sep 30 09:49:18 crc kubenswrapper[4730]: I0930 09:49:18.396979 4730 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 09:49:18 crc kubenswrapper[4730]: I0930 09:49:18.398177 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:18 crc kubenswrapper[4730]: I0930 09:49:18.398213 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:18 crc kubenswrapper[4730]: I0930 09:49:18.398227 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:18 crc kubenswrapper[4730]: I0930 09:49:18.400088 4730 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="0abf086d2b6dabfe2b74ed6b31b94bbd61222698aeb420775118dcca92243bbd" exitCode=0 Sep 30 09:49:18 crc kubenswrapper[4730]: I0930 09:49:18.400187 4730 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 09:49:18 crc kubenswrapper[4730]: I0930 09:49:18.400208 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"0abf086d2b6dabfe2b74ed6b31b94bbd61222698aeb420775118dcca92243bbd"} Sep 30 09:49:18 crc kubenswrapper[4730]: I0930 09:49:18.401453 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:18 crc kubenswrapper[4730]: I0930 09:49:18.401482 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:18 crc kubenswrapper[4730]: I0930 09:49:18.401495 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:18 crc kubenswrapper[4730]: I0930 09:49:18.406371 4730 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="674fc8de60d8bca93dc0d54d5e12664780d58b8b4c5eaf314e923c2f0644e11b" exitCode=0 Sep 30 09:49:18 crc kubenswrapper[4730]: I0930 09:49:18.406422 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"674fc8de60d8bca93dc0d54d5e12664780d58b8b4c5eaf314e923c2f0644e11b"} Sep 30 09:49:18 crc kubenswrapper[4730]: I0930 09:49:18.406517 4730 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 09:49:18 crc kubenswrapper[4730]: I0930 09:49:18.407562 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Sep 30 09:49:18 crc kubenswrapper[4730]: I0930 09:49:18.407622 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:18 crc kubenswrapper[4730]: I0930 09:49:18.407639 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:18 crc kubenswrapper[4730]: I0930 09:49:18.409948 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"ba42c83f0075e3d0f10f24307dd574c42a505351503a8794bbcee9e986ec80e3"} Sep 30 09:49:18 crc kubenswrapper[4730]: I0930 09:49:18.409988 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"758b57b21f203bcb20f4e3a3b20c6de8696ba56b4e0eff6a31faaf6879a3e7b6"} Sep 30 09:49:18 crc kubenswrapper[4730]: I0930 09:49:18.410006 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"a687cd54db44118582a4e1b57fc64241860ae1cfdc202ffd5dccc517967e21f1"} Sep 30 09:49:18 crc kubenswrapper[4730]: I0930 09:49:18.410022 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"95fa75e3d43d0b4c4f3ddcb59fc3bd9fb3a75647e5fedd92463bdfaf6c3db722"} Sep 30 09:49:18 crc kubenswrapper[4730]: I0930 09:49:18.410009 4730 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 09:49:18 crc kubenswrapper[4730]: I0930 09:49:18.410788 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:18 crc kubenswrapper[4730]: I0930 09:49:18.411068 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:18 crc kubenswrapper[4730]: I0930 09:49:18.411092 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:18 crc kubenswrapper[4730]: I0930 09:49:18.411464 4730 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 09:49:18 crc kubenswrapper[4730]: I0930 09:49:18.412147 4730 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="2583fd65bcb5ce4e7d86e114c8b5c95f903f9e64932025452e21d6635dcd9ec1" exitCode=0 Sep 30 09:49:18 crc kubenswrapper[4730]: I0930 09:49:18.412194 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"2583fd65bcb5ce4e7d86e114c8b5c95f903f9e64932025452e21d6635dcd9ec1"} Sep 30 09:49:18 crc kubenswrapper[4730]: I0930 09:49:18.412306 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:18 crc kubenswrapper[4730]: I0930 09:49:18.412328 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:18 crc kubenswrapper[4730]: I0930 09:49:18.412337 4730 kubelet_node_status.go:724] "Recording 
event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:18 crc kubenswrapper[4730]: I0930 09:49:18.412344 4730 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 09:49:18 crc kubenswrapper[4730]: I0930 09:49:18.413082 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:18 crc kubenswrapper[4730]: I0930 09:49:18.413111 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:18 crc kubenswrapper[4730]: I0930 09:49:18.413121 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:19 crc kubenswrapper[4730]: I0930 09:49:19.318023 4730 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.104:6443: connect: connection refused Sep 30 09:49:19 crc kubenswrapper[4730]: E0930 09:49:19.323769 4730 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.104:6443: connect: connection refused" interval="3.2s" Sep 30 09:49:19 crc kubenswrapper[4730]: I0930 09:49:19.417388 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"04522c3f227c31fd9dc48f62caeaa83a5e3c3d9ef7a60e33e6a20f41ecafdf5f"} Sep 30 09:49:19 crc kubenswrapper[4730]: I0930 09:49:19.417503 4730 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 09:49:19 crc kubenswrapper[4730]: I0930 09:49:19.418764 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:19 crc kubenswrapper[4730]: I0930 09:49:19.418797 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:19 crc kubenswrapper[4730]: I0930 09:49:19.418806 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:19 crc kubenswrapper[4730]: I0930 09:49:19.420791 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"461de41b45c19fbaf8e606cd30458759972ca804bac7e4cd5495efd8f30280fd"} Sep 30 09:49:19 crc kubenswrapper[4730]: I0930 09:49:19.420852 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"58b48296b6764c18fc1b5163c1ef503b124b97f11f287fbbbc5b1144ecd06bbe"} Sep 30 09:49:19 crc kubenswrapper[4730]: I0930 09:49:19.420864 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"1d84dba8205ee9f4469b0277711b38ff82a1068b3f1cd951ea4a4eefab51ba88"} Sep 30 09:49:19 crc kubenswrapper[4730]: I0930 09:49:19.420969 4730 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 09:49:19 crc 
kubenswrapper[4730]: I0930 09:49:19.421985 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:19 crc kubenswrapper[4730]: I0930 09:49:19.422053 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:19 crc kubenswrapper[4730]: I0930 09:49:19.422066 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:19 crc kubenswrapper[4730]: I0930 09:49:19.424320 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"d2da24b18038fc740ead0fe99d18f91c87014548941763cf0ef15284ed5eb228"} Sep 30 09:49:19 crc kubenswrapper[4730]: I0930 09:49:19.424371 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"b9d7ccc34d02e190d685a52bfa797834b585dfe919856b9af92419fa4e6e3c8b"} Sep 30 09:49:19 crc kubenswrapper[4730]: I0930 09:49:19.424383 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"e42db724f492a056ad52bf50f43c9603664c4307a8f0c01d2d83d36f644c134c"} Sep 30 09:49:19 crc kubenswrapper[4730]: I0930 09:49:19.424392 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"ac2d1a0a531df208cd612ed7da358209e05fb5a21f658ca2d1ab62720db094e6"} Sep 30 09:49:19 crc kubenswrapper[4730]: I0930 09:49:19.426886 4730 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="56fa7a8024e3b7481fed8e05647dfe8abc6b4616a02dbe2907e3900c5c460846" exitCode=0 Sep 30 09:49:19 crc kubenswrapper[4730]: I0930 09:49:19.427065 4730 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 09:49:19 crc kubenswrapper[4730]: I0930 09:49:19.427213 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"56fa7a8024e3b7481fed8e05647dfe8abc6b4616a02dbe2907e3900c5c460846"} Sep 30 09:49:19 crc kubenswrapper[4730]: I0930 09:49:19.427356 4730 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 09:49:19 crc kubenswrapper[4730]: I0930 09:49:19.428475 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:19 crc kubenswrapper[4730]: I0930 09:49:19.428518 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:19 crc kubenswrapper[4730]: I0930 09:49:19.428532 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:19 crc kubenswrapper[4730]: I0930 09:49:19.428516 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:19 crc kubenswrapper[4730]: I0930 09:49:19.428585 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:19 crc kubenswrapper[4730]: I0930 
09:49:19.428600 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:19 crc kubenswrapper[4730]: I0930 09:49:19.561167 4730 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 09:49:19 crc kubenswrapper[4730]: I0930 09:49:19.562814 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:19 crc kubenswrapper[4730]: I0930 09:49:19.562902 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:19 crc kubenswrapper[4730]: I0930 09:49:19.562917 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:19 crc kubenswrapper[4730]: I0930 09:49:19.562953 4730 kubelet_node_status.go:76] "Attempting to register node" node="crc" Sep 30 09:49:19 crc kubenswrapper[4730]: E0930 09:49:19.563658 4730 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.104:6443: connect: connection refused" node="crc" Sep 30 09:49:19 crc kubenswrapper[4730]: W0930 09:49:19.932535 4730 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.104:6443: connect: connection refused Sep 30 09:49:19 crc kubenswrapper[4730]: E0930 09:49:19.932685 4730 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.104:6443: connect: connection refused" logger="UnhandledError" Sep 30 09:49:20 crc kubenswrapper[4730]: I0930 09:49:20.432457 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"009056b5d735b1cd232be92cb1bfae18ea1e4e6f5b06185d424bba38ef57ade7"} Sep 30 09:49:20 crc kubenswrapper[4730]: I0930 09:49:20.432493 4730 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 09:49:20 crc kubenswrapper[4730]: I0930 09:49:20.433380 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:20 crc kubenswrapper[4730]: I0930 09:49:20.433410 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:20 crc kubenswrapper[4730]: I0930 09:49:20.433422 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:20 crc kubenswrapper[4730]: I0930 09:49:20.435180 4730 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="4cab97e74f25e7263dd4b6c14766ae6b7c77e0c8b172f4dcc292bb2b1140ff06" exitCode=0 Sep 30 09:49:20 crc kubenswrapper[4730]: I0930 09:49:20.435291 4730 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 09:49:20 crc kubenswrapper[4730]: I0930 09:49:20.435297 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" 
event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"4cab97e74f25e7263dd4b6c14766ae6b7c77e0c8b172f4dcc292bb2b1140ff06"} Sep 30 09:49:20 crc kubenswrapper[4730]: I0930 09:49:20.435355 4730 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 09:49:20 crc kubenswrapper[4730]: I0930 09:49:20.435298 4730 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 09:49:20 crc kubenswrapper[4730]: I0930 09:49:20.435383 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 30 09:49:20 crc kubenswrapper[4730]: I0930 09:49:20.436304 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:20 crc kubenswrapper[4730]: I0930 09:49:20.436348 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:20 crc kubenswrapper[4730]: I0930 09:49:20.436359 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:20 crc kubenswrapper[4730]: I0930 09:49:20.436397 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:20 crc kubenswrapper[4730]: I0930 09:49:20.436426 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:20 crc kubenswrapper[4730]: I0930 09:49:20.436438 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:20 crc kubenswrapper[4730]: I0930 09:49:20.437186 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:20 crc kubenswrapper[4730]: I0930 09:49:20.437221 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:20 crc kubenswrapper[4730]: I0930 09:49:20.437233 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:21 crc kubenswrapper[4730]: I0930 09:49:21.233662 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 30 09:49:21 crc kubenswrapper[4730]: I0930 09:49:21.446566 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"dd21cf58f771027cee4a1ad76bd2d29f7ba6e4a8594b8bec9557d3438e393126"} Sep 30 09:49:21 crc kubenswrapper[4730]: I0930 09:49:21.446655 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"92f3455b7f8157102ef97224fa964838712ec1f325fac6f8dfc41167059d6f20"} Sep 30 09:49:21 crc kubenswrapper[4730]: I0930 09:49:21.446679 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"224dca792cb1d410dea56a59cb54bd1ef62e51f5a77d822447abe070a3a70eb5"} Sep 30 09:49:21 crc kubenswrapper[4730]: I0930 09:49:21.446695 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" 
event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"437e64079748b722fe3b2f4dd5bee808da571457c5a08d7443c9aabc378d7fcc"} Sep 30 09:49:21 crc kubenswrapper[4730]: I0930 09:49:21.446704 4730 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 09:49:21 crc kubenswrapper[4730]: I0930 09:49:21.446726 4730 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 09:49:21 crc kubenswrapper[4730]: I0930 09:49:21.447997 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:21 crc kubenswrapper[4730]: I0930 09:49:21.448005 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:21 crc kubenswrapper[4730]: I0930 09:49:21.448028 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:21 crc kubenswrapper[4730]: I0930 09:49:21.448037 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:21 crc kubenswrapper[4730]: I0930 09:49:21.448054 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:21 crc kubenswrapper[4730]: I0930 09:49:21.448039 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:21 crc kubenswrapper[4730]: I0930 09:49:21.540430 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 30 09:49:21 crc kubenswrapper[4730]: I0930 09:49:21.747161 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 30 09:49:21 crc kubenswrapper[4730]: I0930 09:49:21.747379 4730 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 09:49:21 crc kubenswrapper[4730]: I0930 09:49:21.748962 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:21 crc kubenswrapper[4730]: I0930 09:49:21.749000 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:21 crc kubenswrapper[4730]: I0930 09:49:21.749012 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:22 crc kubenswrapper[4730]: I0930 09:49:22.454401 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"877dedb002bb80a66f4e138cfbc8e1b9b3148343c107264ce4faf0697a166697"} Sep 30 09:49:22 crc kubenswrapper[4730]: I0930 09:49:22.454504 4730 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 09:49:22 crc kubenswrapper[4730]: I0930 09:49:22.454669 4730 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 09:49:22 crc kubenswrapper[4730]: I0930 09:49:22.456287 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:22 crc kubenswrapper[4730]: I0930 09:49:22.456320 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 
30 09:49:22 crc kubenswrapper[4730]: I0930 09:49:22.456330 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:22 crc kubenswrapper[4730]: I0930 09:49:22.457407 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:22 crc kubenswrapper[4730]: I0930 09:49:22.457433 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:22 crc kubenswrapper[4730]: I0930 09:49:22.457447 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:22 crc kubenswrapper[4730]: I0930 09:49:22.764499 4730 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 09:49:22 crc kubenswrapper[4730]: I0930 09:49:22.766279 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:22 crc kubenswrapper[4730]: I0930 09:49:22.766367 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:22 crc kubenswrapper[4730]: I0930 09:49:22.766388 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:22 crc kubenswrapper[4730]: I0930 09:49:22.766477 4730 kubelet_node_status.go:76] "Attempting to register node" node="crc" Sep 30 09:49:23 crc kubenswrapper[4730]: I0930 09:49:23.377989 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 30 09:49:23 crc kubenswrapper[4730]: I0930 09:49:23.378264 4730 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 09:49:23 crc kubenswrapper[4730]: I0930 09:49:23.380088 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:23 crc kubenswrapper[4730]: I0930 09:49:23.380173 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:23 crc kubenswrapper[4730]: I0930 09:49:23.380195 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:23 crc kubenswrapper[4730]: I0930 09:49:23.386263 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 30 09:49:23 crc kubenswrapper[4730]: I0930 09:49:23.456777 4730 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 09:49:23 crc kubenswrapper[4730]: I0930 09:49:23.456892 4730 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 09:49:23 crc kubenswrapper[4730]: I0930 09:49:23.456997 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 30 09:49:23 crc kubenswrapper[4730]: I0930 09:49:23.457126 4730 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 09:49:23 crc kubenswrapper[4730]: I0930 09:49:23.461072 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:23 crc kubenswrapper[4730]: I0930 09:49:23.461129 4730 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:23 crc kubenswrapper[4730]: I0930 09:49:23.461143 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:23 crc kubenswrapper[4730]: I0930 09:49:23.461415 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:23 crc kubenswrapper[4730]: I0930 09:49:23.461475 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:23 crc kubenswrapper[4730]: I0930 09:49:23.461516 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:23 crc kubenswrapper[4730]: I0930 09:49:23.461510 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:23 crc kubenswrapper[4730]: I0930 09:49:23.461587 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:23 crc kubenswrapper[4730]: I0930 09:49:23.461679 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:24 crc kubenswrapper[4730]: I0930 09:49:24.351523 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc" Sep 30 09:49:24 crc kubenswrapper[4730]: I0930 09:49:24.460760 4730 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 09:49:24 crc kubenswrapper[4730]: I0930 09:49:24.460764 4730 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 09:49:24 crc kubenswrapper[4730]: I0930 09:49:24.462249 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:24 crc kubenswrapper[4730]: I0930 09:49:24.462308 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:24 crc kubenswrapper[4730]: I0930 09:49:24.462327 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:24 crc kubenswrapper[4730]: I0930 09:49:24.462499 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:24 crc kubenswrapper[4730]: I0930 09:49:24.462547 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:24 crc kubenswrapper[4730]: I0930 09:49:24.462565 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:24 crc kubenswrapper[4730]: I0930 09:49:24.748870 4730 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Sep 30 09:49:24 crc kubenswrapper[4730]: I0930 09:49:24.748969 4730 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" 
output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Sep 30 09:49:24 crc kubenswrapper[4730]: I0930 09:49:24.839985 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 30 09:49:24 crc kubenswrapper[4730]: I0930 09:49:24.840365 4730 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 09:49:24 crc kubenswrapper[4730]: I0930 09:49:24.841750 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:24 crc kubenswrapper[4730]: I0930 09:49:24.841805 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:24 crc kubenswrapper[4730]: I0930 09:49:24.841821 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:25 crc kubenswrapper[4730]: I0930 09:49:25.155586 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc" Sep 30 09:49:25 crc kubenswrapper[4730]: I0930 09:49:25.463390 4730 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 09:49:25 crc kubenswrapper[4730]: I0930 09:49:25.464757 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:25 crc kubenswrapper[4730]: I0930 09:49:25.464836 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:25 crc kubenswrapper[4730]: I0930 09:49:25.464851 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:26 crc kubenswrapper[4730]: E0930 09:49:26.459413 4730 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Sep 30 09:49:26 crc kubenswrapper[4730]: I0930 09:49:26.819813 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 30 09:49:26 crc kubenswrapper[4730]: I0930 09:49:26.820041 4730 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 09:49:26 crc kubenswrapper[4730]: I0930 09:49:26.821687 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:26 crc kubenswrapper[4730]: I0930 09:49:26.821740 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:26 crc kubenswrapper[4730]: I0930 09:49:26.821755 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:27 crc kubenswrapper[4730]: I0930 09:49:27.694731 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 30 09:49:27 crc kubenswrapper[4730]: I0930 09:49:27.694902 4730 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 09:49:27 crc kubenswrapper[4730]: I0930 09:49:27.696178 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:27 crc kubenswrapper[4730]: I0930 09:49:27.696222 
Sep 30 09:49:27 crc kubenswrapper[4730]: I0930 09:49:27.696233 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 09:49:30 crc kubenswrapper[4730]: W0930 09:49:30.053312 4730 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": net/http: TLS handshake timeout
Sep 30 09:49:30 crc kubenswrapper[4730]: I0930 09:49:30.053442 4730 trace.go:236] Trace[1865341819]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (30-Sep-2025 09:49:20.052) (total time: 10001ms):
Sep 30 09:49:30 crc kubenswrapper[4730]: Trace[1865341819]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": net/http: TLS handshake timeout 10001ms (09:49:30.053)
Sep 30 09:49:30 crc kubenswrapper[4730]: Trace[1865341819]: [10.001127372s] [10.001127372s] END
Sep 30 09:49:30 crc kubenswrapper[4730]: E0930 09:49:30.053475 4730 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError"
Sep 30 09:49:30 crc kubenswrapper[4730]: I0930 09:49:30.318964 4730 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": net/http: TLS handshake timeout
Sep 30 09:49:30 crc kubenswrapper[4730]: W0930 09:49:30.472344 4730 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": net/http: TLS handshake timeout
Sep 30 09:49:30 crc kubenswrapper[4730]: I0930 09:49:30.472741 4730 trace.go:236] Trace[396923926]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (30-Sep-2025 09:49:20.470) (total time: 10002ms):
Sep 30 09:49:30 crc kubenswrapper[4730]: Trace[396923926]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": net/http: TLS handshake timeout 10002ms (09:49:30.472)
Sep 30 09:49:30 crc kubenswrapper[4730]: Trace[396923926]: [10.002454648s] [10.002454648s] END
Sep 30 09:49:30 crc kubenswrapper[4730]: E0930 09:49:30.472766 4730 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError"
Sep 30 09:49:30 crc kubenswrapper[4730]: I0930 09:49:30.479071 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log"
Sep 30 09:49:30 crc kubenswrapper[4730]: I0930 09:49:30.480499 4730 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="009056b5d735b1cd232be92cb1bfae18ea1e4e6f5b06185d424bba38ef57ade7" exitCode=255
Sep 30 09:49:30 crc kubenswrapper[4730]: I0930 09:49:30.480562 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"009056b5d735b1cd232be92cb1bfae18ea1e4e6f5b06185d424bba38ef57ade7"}
Sep 30 09:49:30 crc kubenswrapper[4730]: I0930 09:49:30.480836 4730 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 30 09:49:30 crc kubenswrapper[4730]: I0930 09:49:30.482244 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 09:49:30 crc kubenswrapper[4730]: I0930 09:49:30.482309 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 09:49:30 crc kubenswrapper[4730]: I0930 09:49:30.482321 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 09:49:30 crc kubenswrapper[4730]: I0930 09:49:30.482827 4730 scope.go:117] "RemoveContainer" containerID="009056b5d735b1cd232be92cb1bfae18ea1e4e6f5b06185d424bba38ef57ade7"
Sep 30 09:49:30 crc kubenswrapper[4730]: W0930 09:49:30.515465 4730 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": net/http: TLS handshake timeout
Sep 30 09:49:30 crc kubenswrapper[4730]: I0930 09:49:30.515593 4730 trace.go:236] Trace[1067444146]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (30-Sep-2025 09:49:20.514) (total time: 10001ms):
Sep 30 09:49:30 crc kubenswrapper[4730]: Trace[1067444146]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": net/http: TLS handshake timeout 10001ms (09:49:30.515)
Sep 30 09:49:30 crc kubenswrapper[4730]: Trace[1067444146]: [10.001507302s] [10.001507302s] END
Sep 30 09:49:30 crc kubenswrapper[4730]: E0930 09:49:30.515646 4730 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError"
Sep 30 09:49:30 crc kubenswrapper[4730]: I0930 09:49:30.821208 4730 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403}
Sep 30 09:49:30 crc kubenswrapper[4730]: I0930 09:49:30.821276 4730 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403"
Sep 30 09:49:30 crc kubenswrapper[4730]: I0930 09:49:30.830037 4730 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403}
Sep 30 09:49:30 crc kubenswrapper[4730]: I0930 09:49:30.830121 4730 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403"
Sep 30 09:49:31 crc kubenswrapper[4730]: I0930 09:49:31.485708 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log"
Sep 30 09:49:31 crc kubenswrapper[4730]: I0930 09:49:31.487270 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"5305f42ae8fc9005ec4726813a881f7b86ab167dd060238bbde4c9af5eb2c010"}
Sep 30 09:49:31 crc kubenswrapper[4730]: I0930 09:49:31.487480 4730 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 30 09:49:31 crc kubenswrapper[4730]: I0930 09:49:31.488484 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 09:49:31 crc kubenswrapper[4730]: I0930 09:49:31.488557 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 09:49:31 crc kubenswrapper[4730]: I0930 09:49:31.488574 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 09:49:33 crc kubenswrapper[4730]: I0930 09:49:33.926696 4730 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160
Sep 30 09:49:34 crc kubenswrapper[4730]: I0930 09:49:34.374499 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc"
Sep 30 09:49:34 crc kubenswrapper[4730]: I0930 09:49:34.374747 4730 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 30 09:49:34 crc kubenswrapper[4730]: I0930 09:49:34.376214 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 09:49:34 crc kubenswrapper[4730]: I0930 09:49:34.376353 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 09:49:34 crc kubenswrapper[4730]: I0930 09:49:34.376448 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 09:49:34 crc kubenswrapper[4730]: I0930 09:49:34.386133 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc"
Sep 30 09:49:34 crc kubenswrapper[4730]: I0930 09:49:34.495845 4730 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 30 09:49:34 crc kubenswrapper[4730]: I0930 09:49:34.497137 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 09:49:34 crc kubenswrapper[4730]: I0930 09:49:34.497190 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 09:49:34 crc kubenswrapper[4730]: I0930 09:49:34.497207 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 09:49:34 crc kubenswrapper[4730]: I0930 09:49:34.748141 4730 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded" start-of-body=
Sep 30 09:49:34 crc kubenswrapper[4730]: I0930 09:49:34.748232 4730 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded"
Sep 30 09:49:34 crc kubenswrapper[4730]: I0930 09:49:34.846473 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc"
Sep 30 09:49:34 crc kubenswrapper[4730]: I0930 09:49:34.846665 4730 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 30 09:49:34 crc kubenswrapper[4730]: I0930 09:49:34.846765 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc"
Sep 30 09:49:34 crc kubenswrapper[4730]: I0930 09:49:34.847938 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 09:49:34 crc kubenswrapper[4730]: I0930 09:49:34.847988 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 09:49:34 crc kubenswrapper[4730]: I0930 09:49:34.848003 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 09:49:34 crc kubenswrapper[4730]: I0930 09:49:34.852824 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc"
Sep 30 09:49:35 crc kubenswrapper[4730]: I0930 09:49:35.276526 4730 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160
Sep 30 09:49:35 crc kubenswrapper[4730]: I0930 09:49:35.498404 4730 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 30 09:49:35 crc kubenswrapper[4730]: I0930 09:49:35.499290 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 09:49:35 crc kubenswrapper[4730]: I0930 09:49:35.499323 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 09:49:35 crc kubenswrapper[4730]: I0930 09:49:35.499334 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 09:49:35 crc kubenswrapper[4730]: E0930 09:49:35.824688 4730 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": context deadline exceeded" interval="6.4s"
Sep 30 09:49:35 crc kubenswrapper[4730]: I0930 09:49:35.828640 4730 reconstruct.go:205] "DevicePaths of reconstructed volumes updated"
Sep 30 09:49:35 crc kubenswrapper[4730]: I0930 09:49:35.828691 4730 trace.go:236] Trace[60458145]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (30-Sep-2025 09:49:23.949) (total time: 11878ms):
Sep 30 09:49:35 crc kubenswrapper[4730]: Trace[60458145]: ---"Objects listed" error: 11878ms (09:49:35.828)
Sep 30 09:49:35 crc kubenswrapper[4730]: Trace[60458145]: [11.878992277s] [11.878992277s] END
Sep 30 09:49:35 crc kubenswrapper[4730]: I0930 09:49:35.828726 4730 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160
Sep 30 09:49:35 crc kubenswrapper[4730]: E0930 09:49:35.829056 4730 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes \"crc\" is forbidden: autoscaling.openshift.io/ManagedNode infra config cache not synchronized" node="crc"
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.306542 4730 apiserver.go:52] "Watching apiserver"
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.311268 4730 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.311589 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c","openshift-network-node-identity/network-node-identity-vrzqb","openshift-network-operator/iptables-alerter-4ln5h","openshift-network-operator/network-operator-58b4c7f79c-55gtf"]
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.311980 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf"
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.312022 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.312040 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 30 09:49:36 crc kubenswrapper[4730]: E0930 09:49:36.312368 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.312560 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb"
Sep 30 09:49:36 crc kubenswrapper[4730]: E0930 09:49:36.312560 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.312980 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 09:49:36 crc kubenswrapper[4730]: E0930 09:49:36.313112 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.313014 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.314290 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.316818 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.316940 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.316827 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.316859 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.316858 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.317145 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.316907 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.317315 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.319040 4730 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.331904 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.331975 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.331998 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: 
\"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.332396 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.332662 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.332725 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.332751 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.332916 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.333075 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.333147 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.333459 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.333505 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.333177 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.333586 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.333636 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.333659 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.333686 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.333708 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.333729 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.333731 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.333751 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.333776 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.333800 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.333823 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.333848 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.333871 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.333887 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.333903 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.333919 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.333937 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: 
\"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.333992 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.334021 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.334037 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.334052 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.334068 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.334084 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.334101 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.334140 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.334155 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.334173 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.334187 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.334202 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.334217 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.334235 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.334250 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.334267 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.334285 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.334301 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.334328 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Sep 30 09:49:36 crc 
kubenswrapper[4730]: I0930 09:49:36.334344 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.334359 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.334374 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.334390 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.334408 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.334425 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.334453 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.334480 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.334502 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.334518 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod 
\"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.334533 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.334554 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.334620 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.334637 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.334651 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.334669 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.334684 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.334702 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.334720 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.334736 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod 
\"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.334752 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.334769 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.334793 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.334900 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.334920 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.334937 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.334956 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.334975 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.335016 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.335038 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" 
(UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.335066 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.335100 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.335123 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.335148 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.335171 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.335187 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.335209 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.335226 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.335244 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.335261 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.335278 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.335301 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.335323 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.335342 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.335358 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.335375 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.335392 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.335410 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.335429 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 
09:49:36.335445 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.335464 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.335482 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.335503 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.335519 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.335536 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.335554 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.335571 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.335587 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.335603 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Sep 30 09:49:36 crc 
kubenswrapper[4730]: I0930 09:49:36.335636 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.335652 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.335671 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.335687 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.335704 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.335720 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.335737 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.335756 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.335772 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.335808 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 
09:49:36.335828 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.335860 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") "
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.335892 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") "
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.335915 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") "
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.335934 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") "
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.335990 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") "
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.336016 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") "
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.336033 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") "
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.336050 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") "
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.336069 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") "
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.336085 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") "
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.336102 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") "
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.336120 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.336138 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.336155 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") "
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.336173 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") "
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.336189 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") "
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.336209 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") "
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.336225 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") "
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.337282 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") "
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.337329 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") "
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.337358 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") "
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.337381 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") "
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.337409 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") "
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.334079 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.334599 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.338931 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.334844 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.334967 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.334965 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.335008 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.335089 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.335107 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.335127 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.335187 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.335218 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.335242 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.335282 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.335410 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.335428 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.335450 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.335470 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.335663 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.335719 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.335728 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.335790 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.335885 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.336010 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.336044 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.336064 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.336080 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.336125 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.336065 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.336219 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.336317 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.337308 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.337338 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.337436 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.337508 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.337691 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.337869 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.337945 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.337945 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.337945 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.338055 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.338107 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.338255 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.338405 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.339330 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 09:49:36 crc kubenswrapper[4730]: E0930 09:49:36.338567 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 09:49:36.8385313 +0000 UTC m=+21.171791293 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.338704 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.338741 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.338790 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.339698 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.339303 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.339747 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.339893 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.340128 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.340142 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.340177 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.340353 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.340496 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.340500 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.340571 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.341531 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.341645 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.341693 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.342122 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.342254 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.342406 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.342419 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.342461 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.342521 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") "
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.342014 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.342739 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.343858 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.343867 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.343935 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.344200 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.344219 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.344231 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.344238 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.344252 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.343379 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.343538 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.343627 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.344440 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.344461 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.344679 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.344803 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.344874 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.344960 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.345338 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.345352 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.345481 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.345602 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.339726 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.345600 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.343053 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.342913 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") "
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.345943 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") "
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.346107 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") "
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.346146 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") "
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.346178 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") "
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.346210 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.346303 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.347014 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.347172 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") "
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.347206 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.347227 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") "
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.347249 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") "
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.347265 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") "
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.347284 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") "
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.347304 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.347325 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") "
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.347343 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") "
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.347365 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") "
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.347366 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.347383 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") "
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.347400 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") "
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.347420 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") "
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.347440 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") "
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.347457 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") "
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.347477 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") "
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.347495 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") "
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.347514 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") "
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.347530 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") "
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.347552 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") "
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.347571 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") "
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.347588 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") "
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.347626 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") "
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.347652 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") "
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.347672 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") "
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.347692 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") "
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.347712 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") "
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.347733 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") "
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.347750 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") "
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.347769 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") "
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.347787 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") "
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.347804 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") "
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.347824 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") "
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.347846 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") "
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.347866 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") "
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.347888 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") "
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.347890 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.347911 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") "
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.347935 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") "
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.347962 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") "
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.347981 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.348000 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") "
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.348018 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.348038 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") "
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.348056 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") "
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.348076 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") "
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.348094 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName:
\"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.348114 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.348132 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.348150 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.348168 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.348185 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.348202 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.348220 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.348281 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.348307 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 
09:49:36.348329 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.348365 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.348375 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.348433 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.348462 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.348482 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.348507 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.348532 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.348555 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" 
(UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.348603 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.348673 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.348693 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.348712 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.348781 4730 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.348795 4730 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.348806 4730 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.348816 4730 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.348829 4730 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.348840 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.348850 
4730 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.348862 4730 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.348871 4730 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.348881 4730 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.348890 4730 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.348903 4730 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.348912 4730 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.348921 4730 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.348932 4730 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.348945 4730 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.348955 4730 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.348965 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.348977 4730 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.348986 4730 reconciler_common.go:293] "Volume detached 
for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.348995 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.348988 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.349006 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.349054 4730 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.349070 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.349085 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.349103 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.349113 4730 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.349125 4730 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.349138 4730 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.349151 4730 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.349163 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: 
\"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.349174 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.349184 4730 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.349198 4730 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.349209 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.349220 4730 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.349332 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.349352 4730 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.349365 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.349377 4730 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.349395 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.349409 4730 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.349422 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.349435 4730 
reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.349664 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.349745 4730 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.349771 4730 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.349784 4730 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.349804 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.349820 4730 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.349835 4730 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.349818 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.349851 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.349861 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). 
InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.349970 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.350127 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.350265 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.350379 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.350441 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.350751 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.350929 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.351275 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 09:49:36 crc kubenswrapper[4730]: E0930 09:49:36.351476 4730 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 30 09:49:36 crc kubenswrapper[4730]: E0930 09:49:36.351575 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-30 09:49:36.851548416 +0000 UTC m=+21.184808409 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.351948 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.352116 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.352136 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.352238 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.352505 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.352773 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.352912 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.353176 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.353301 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.353298 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.353563 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.354358 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.354484 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.354599 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 09:49:36 crc kubenswrapper[4730]: E0930 09:49:36.354720 4730 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Sep 30 09:49:36 crc kubenswrapper[4730]: E0930 09:49:36.354805 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-30 09:49:36.854783084 +0000 UTC m=+21.188043077 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.349850 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.354851 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.354863 4730 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.354909 4730 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.354921 4730 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.354930 4730 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.354941 4730 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.354951 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.354961 4730 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.354973 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.354969 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.354983 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.354995 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.355005 4730 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.355014 4730 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.355024 4730 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.355033 4730 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.355043 4730 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.355052 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.355062 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.355038 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.356139 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.356507 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.356722 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.356852 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.356907 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.355072 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.357526 4730 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.357553 4730 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.357566 4730 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.357578 4730 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.357589 4730 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.357599 4730 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.357624 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.357636 4730 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.357648 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.357658 4730 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.357676 4730 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.357690 4730 reconciler_common.go:293] "Volume detached for volume 
\"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.357700 4730 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.357710 4730 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.357720 4730 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.357730 4730 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.357741 4730 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.357750 4730 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.357759 4730 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.357769 4730 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.357779 4730 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.357790 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.357801 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.357811 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.357820 4730 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.357830 4730 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.357839 4730 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.357849 4730 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.357858 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.357867 4730 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.357912 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.357923 4730 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.357934 4730 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.357943 4730 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.357953 4730 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.358162 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.358711 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). 
InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.358953 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.360782 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.360941 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.361066 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.361155 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.361445 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.362814 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.361717 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.361732 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.362002 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.362262 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.362311 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.362515 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.363077 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.363466 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.363501 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.363805 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.363927 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.364180 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.364745 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.365009 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.365095 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). 
InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.365156 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.365377 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.365509 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.365922 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.366559 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.366797 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.367095 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.367287 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.367321 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.367397 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.367513 4730 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.367859 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.368308 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.369022 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.369558 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.369633 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.370602 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.370910 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.371122 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.371425 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.371486 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 09:49:36 crc kubenswrapper[4730]: E0930 09:49:36.371843 4730 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 30 09:49:36 crc kubenswrapper[4730]: E0930 09:49:36.371869 4730 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 30 09:49:36 crc kubenswrapper[4730]: E0930 09:49:36.371882 4730 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 09:49:36 crc kubenswrapper[4730]: E0930 09:49:36.372296 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-09-30 09:49:36.872272973 +0000 UTC m=+21.205532966 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.371987 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.372579 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.375151 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.375170 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.375565 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.375661 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.375591 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.376225 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 09:49:36 crc kubenswrapper[4730]: E0930 09:49:36.376373 4730 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 30 09:49:36 crc kubenswrapper[4730]: E0930 09:49:36.376443 4730 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 30 09:49:36 crc kubenswrapper[4730]: E0930 09:49:36.376500 4730 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 09:49:36 crc kubenswrapper[4730]: E0930 09:49:36.376629 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-09-30 09:49:36.876589421 +0000 UTC m=+21.209849414 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.376869 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.376995 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.377258 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.378374 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.378501 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.383109 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.388001 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.388653 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.388784 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.389090 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.389742 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.395864 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.397076 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.397500 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.405245 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.409380 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.410437 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.413785 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.418820 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.419915 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.421503 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.422719 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.423787 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.425231 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.435544 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.436189 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.437586 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.438269 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.440320 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.446233 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.447169 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.447656 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.448914 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.449561 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.450108 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.452090 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.452957 4730 kubelet_volumes.go:163] "Cleaned up 
orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.454462 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.455220 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.456776 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.457412 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.458155 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.458308 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.458444 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.458497 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.458633 4730 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.458656 4730 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.458667 4730 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 
crc kubenswrapper[4730]: I0930 09:49:36.458683 4730 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.458697 4730 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.458703 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Sep 30 09:49:36 crc kubenswrapper[4730]: W0930 09:49:36.458816 4730 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.458828 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.459052 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.459190 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.459218 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.458709 4730 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.459924 4730 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.459962 4730 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.459972 4730 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on 
node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.459982 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.459993 4730 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.460003 4730 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.460013 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.460023 4730 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.460033 4730 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.460041 4730 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.460050 4730 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.460058 4730 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.460067 4730 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.460076 4730 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.460086 4730 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.460096 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath 
\"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.460105 4730 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.460114 4730 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.460126 4730 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.460135 4730 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.460146 4730 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.460156 4730 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.460167 4730 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.460178 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.460187 4730 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.460196 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.460206 4730 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.460215 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.460225 4730 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.460234 4730 
reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.460243 4730 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.460252 4730 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.460263 4730 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.460275 4730 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.460287 4730 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.460296 4730 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.460306 4730 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.460315 4730 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.460325 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.460336 4730 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.460346 4730 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.460354 4730 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.460364 4730 
reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.460374 4730 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.460384 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.460393 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.460403 4730 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.460413 4730 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.460422 4730 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.460432 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.460441 4730 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.460450 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.460460 4730 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.460469 4730 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.460483 4730 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.460493 4730 
reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.460502 4730 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.460512 4730 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.460521 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.460531 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.460541 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.460552 4730 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.460560 4730 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.460569 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.460578 4730 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.460586 4730 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.460595 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.460604 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.461206 4730 reconciler_common.go:293] "Volume 
detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.461220 4730 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.461231 4730 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.461244 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.461254 4730 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.461265 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.461275 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.461285 4730 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.461294 4730 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.461303 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.461313 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.461323 4730 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.461332 4730 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc 
kubenswrapper[4730]: I0930 09:49:36.461341 4730 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.461351 4730 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.461762 4730 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.461882 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.462400 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.463664 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.464194 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.465210 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.466862 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.468152 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.468740 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.470149 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.470985 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.472130 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.472894 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Sep 30 09:49:36 crc 
kubenswrapper[4730]: I0930 09:49:36.474031 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.474685 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.475555 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.476279 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.476303 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.477758 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.478796 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.479740 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.480240 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.481241 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.482093 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.482708 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.483679 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.487546 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.505285 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.515709 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.519403 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.526413 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.536446 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.546882 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.560824 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.631050 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf"
Sep 30 09:49:36 crc kubenswrapper[4730]: E0930 09:49:36.645907 4730 kuberuntime_manager.go:1274] "Unhandled Error" err=<
Sep 30 09:49:36 crc kubenswrapper[4730]: container &Container{Name:network-operator,Image:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b,Command:[/bin/bash -c #!/bin/bash
Sep 30 09:49:36 crc kubenswrapper[4730]: set -o allexport
Sep 30 09:49:36 crc kubenswrapper[4730]: if [[ -f /etc/kubernetes/apiserver-url.env ]]; then
Sep 30 09:49:36 crc kubenswrapper[4730]: source /etc/kubernetes/apiserver-url.env
Sep 30 09:49:36 crc kubenswrapper[4730]: else
Sep 30 09:49:36 crc kubenswrapper[4730]: echo "Error: /etc/kubernetes/apiserver-url.env is missing"
Sep 30 09:49:36 crc kubenswrapper[4730]: exit 1
Sep 30 09:49:36 crc kubenswrapper[4730]: fi
Sep 30 09:49:36 crc kubenswrapper[4730]: exec /usr/bin/cluster-network-operator start --listen=0.0.0.0:9104
Sep 30 09:49:36 crc kubenswrapper[4730]: 
],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:cno,HostPort:9104,ContainerPort:9104,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:RELEASE_VERSION,Value:4.18.1,ValueFrom:nil,},EnvVar{Name:KUBE_PROXY_IMAGE,Value:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b97554198294bf544fbc116c94a0a1fb2ec8a4de0e926bf9d9e320135f0bee6f,ValueFrom:nil,},EnvVar{Name:KUBE_RBAC_PROXY_IMAGE,Value:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09,ValueFrom:nil,},EnvVar{Name:MULTUS_IMAGE,Value:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26,ValueFrom:nil,},EnvVar{Name:MULTUS_ADMISSION_CONTROLLER_IMAGE,Value:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317,ValueFrom:nil,},EnvVar{Name:CNI_PLUGINS_IMAGE,Value:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc,ValueFrom:nil,},EnvVar{Name:BOND_CNI_PLUGIN_IMAGE,Value:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78,ValueFrom:nil,},EnvVar{Name:WHEREABOUTS_CNI_IMAGE,Value:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4,ValueFrom:nil,},EnvVar{Name:ROUTE_OVERRRIDE_CNI_IMAGE,Value:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa,ValueFrom:nil,},EnvVar{Name:MULTUS_NETWORKPOLICY_IMAGE,Value:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:23f833d3738d68706eb2f2868bd76bd71cee016cffa6faf5f045a60cc8c6eddd,ValueFrom:nil,},EnvVar{Name:OVN_IMAGE,Value:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2,ValueFrom:nil,},EnvVar{Name:OVN_NB_RAFT_ELECTION_TIMER,Value:10,ValueFrom:nil,},EnvVar{Name:OVN_SB_RAFT_ELECTION_TIMER,Value:16,ValueFrom:nil,},EnvVar{Name:OVN_NORTHD_PROBE_INTERVAL,Value:10000,ValueFrom:nil,},EnvVar{Name:OVN_CONTROLLER_INACTIVITY_PROBE,Value:180000,ValueFrom:nil,},EnvVar{Name:OVN_NB_INACTIVITY_PROBE,Value:60000,ValueFrom:nil,},EnvVar{Name:EGRESS_ROUTER_CNI_IMAGE,Value:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c,ValueFrom:nil,},EnvVar{Name:NETWORK_METRICS_DAEMON_IMAGE,Value:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d,ValueFrom:nil,},EnvVar{Name:NETWORK_CHECK_SOURCE_IMAGE,Value:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b,ValueFrom:nil,},EnvVar{Name:NETWORK_CHECK_TARGET_IMAGE,Value:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b,ValueFrom:nil,},EnvVar{Name:NETWORK_OPERATOR_IMAGE,Value:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b,ValueFrom:nil,},EnvVar{Name:CLOUD_NETWORK_CONFIG_CONTROLLER_IMAGE,Value:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8048f1cb0be521f09749c0a489503cd56d85b68c6ca93380e082cfd693cd97a8,ValueFrom:nil,},EnvVar{Name:CLI_IMAGE,Value:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2,ValueFrom:nil,},EnvVar{Name:FRR_K8S_IMAGE,Value:quay.io
/openshift-release-dev/ocp-v4.0-art-dev@sha256:5dbf844e49bb46b78586930149e5e5f5dc121014c8afd10fe36f3651967cc256,ValueFrom:nil,},EnvVar{Name:NETWORKING_CONSOLE_PLUGIN_IMAGE,Value:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd,ValueFrom:nil,},EnvVar{Name:POD_NAME,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.name,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{52428800 0} {} 50Mi BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:host-etc-kube,ReadOnly:true,MountPath:/etc/kubernetes,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:metrics-tls,ReadOnly:false,MountPath:/var/run/secrets/serving-cert,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-rdwmf,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:nil,Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod network-operator-58b4c7f79c-55gtf_openshift-network-operator(37a5e44f-9a88-4405-be8a-b645485e7312): CreateContainerConfigError: services have not yet been read at least once, cannot construct envvars
Sep 30 09:49:36 crc kubenswrapper[4730]: > logger="UnhandledError"
Sep 30 09:49:36 crc kubenswrapper[4730]: E0930 09:49:36.647384 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"network-operator\" with CreateContainerConfigError: \"services have not yet been read at least once, cannot construct envvars\"" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" podUID="37a5e44f-9a88-4405-be8a-b645485e7312"
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.653654 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb"
Sep 30 09:49:36 crc kubenswrapper[4730]: W0930 09:49:36.664581 4730 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podef543e1b_8068_4ea3_b32a_61027b32e95d.slice/crio-c4c8c3a6bb5b3455659afaaa2cb717f4bbc9bad2177374d1d9f531a5604ea231 WatchSource:0}: Error finding container c4c8c3a6bb5b3455659afaaa2cb717f4bbc9bad2177374d1d9f531a5604ea231: Status 404 returned error can't find the container with id c4c8c3a6bb5b3455659afaaa2cb717f4bbc9bad2177374d1d9f531a5604ea231
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.666654 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h"
Sep 30 09:49:36 crc kubenswrapper[4730]: E0930 09:49:36.666904 4730 kuberuntime_manager.go:1274] "Unhandled Error" err=<
Sep 30 09:49:36 crc kubenswrapper[4730]: container &Container{Name:webhook,Image:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2,Command:[/bin/bash -c set -xe
Sep 30 09:49:36 crc kubenswrapper[4730]: if [[ -f "/env/_master" ]]; then
Sep 30 09:49:36 crc kubenswrapper[4730]: set -o allexport
Sep 30 09:49:36 crc kubenswrapper[4730]: source "/env/_master"
Sep 30 09:49:36 crc kubenswrapper[4730]: set +o allexport
Sep 30 09:49:36 crc kubenswrapper[4730]: fi
Sep 30 09:49:36 crc kubenswrapper[4730]: # OVN-K will try to remove hybrid overlay node annotations even when the hybrid overlay is not enabled.
Sep 30 09:49:36 crc kubenswrapper[4730]: # https://github.com/ovn-org/ovn-kubernetes/blob/ac6820df0b338a246f10f412cd5ec903bd234694/go-controller/pkg/ovn/master.go#L791
Sep 30 09:49:36 crc kubenswrapper[4730]: ho_enable="--enable-hybrid-overlay"
Sep 30 09:49:36 crc kubenswrapper[4730]: echo "I$(date "+%m%d %H:%M:%S.%N") - network-node-identity - start webhook"
Sep 30 09:49:36 crc kubenswrapper[4730]: # extra-allowed-user: service account `ovn-kubernetes-control-plane`
Sep 30 09:49:36 crc kubenswrapper[4730]: # sets pod annotations in multi-homing layer3 network controller (cluster-manager)
Sep 30 09:49:36 crc kubenswrapper[4730]: exec /usr/bin/ovnkube-identity --k8s-apiserver=https://api-int.crc.testing:6443 \
Sep 30 09:49:36 crc kubenswrapper[4730]: --webhook-cert-dir="/etc/webhook-cert" \
Sep 30 09:49:36 crc kubenswrapper[4730]: --webhook-host=127.0.0.1 \
Sep 30 09:49:36 crc kubenswrapper[4730]: --webhook-port=9743 \
Sep 30 09:49:36 crc kubenswrapper[4730]: ${ho_enable} \
Sep 30 09:49:36 crc kubenswrapper[4730]: --enable-interconnect \
Sep 30 09:49:36 crc kubenswrapper[4730]: --disable-approver \
Sep 30 09:49:36 crc kubenswrapper[4730]: --extra-allowed-user="system:serviceaccount:openshift-ovn-kubernetes:ovn-kubernetes-control-plane" \
Sep 30 09:49:36 crc kubenswrapper[4730]: --wait-for-kubernetes-api=200s \
Sep 30 09:49:36 crc kubenswrapper[4730]: --pod-admission-conditions="/var/run/ovnkube-identity-config/additional-pod-admission-cond.json" \
Sep 30 09:49:36 crc kubenswrapper[4730]: --loglevel="${LOGLEVEL}"
Sep 30 09:49:36 crc kubenswrapper[4730]: ],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LOGLEVEL,Value:2,ValueFrom:nil,},EnvVar{Name:KUBERNETES_NODE_NAME,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:spec.nodeName,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{52428800 0} {} 50Mi BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:webhook-cert,ReadOnly:false,MountPath:/etc/webhook-cert/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:env-overrides,ReadOnly:false,MountPath:/env,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovnkube-identity-cm,ReadOnly:false,MountPath:/var/run/ovnkube-identity-config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-s2kz5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000470000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod network-node-identity-vrzqb_openshift-network-node-identity(ef543e1b-8068-4ea3-b32a-61027b32e95d): CreateContainerConfigError: services have not yet been read at least once, cannot construct envvars
Sep 30 09:49:36 crc kubenswrapper[4730]: > logger="UnhandledError"
Sep 30 09:49:36 crc kubenswrapper[4730]: E0930 09:49:36.671398 4730 kuberuntime_manager.go:1274] "Unhandled Error" err=<
Sep 30 09:49:36 crc kubenswrapper[4730]: container &Container{Name:approver,Image:quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2,Command:[/bin/bash -c set -xe
Sep 30 09:49:36 crc kubenswrapper[4730]: if [[ -f "/env/_master" ]]; then
Sep 30 09:49:36 crc kubenswrapper[4730]: set -o allexport
Sep 30 09:49:36 crc kubenswrapper[4730]: source "/env/_master"
Sep 30 09:49:36 crc kubenswrapper[4730]: set +o allexport
Sep 30 09:49:36 crc kubenswrapper[4730]: fi
Sep 30 09:49:36 crc kubenswrapper[4730]: 
Sep 30 09:49:36 crc kubenswrapper[4730]: echo "I$(date "+%m%d %H:%M:%S.%N") - network-node-identity - start approver"
Sep 30 09:49:36 crc kubenswrapper[4730]: exec /usr/bin/ovnkube-identity --k8s-apiserver=https://api-int.crc.testing:6443 \
Sep 30 09:49:36 crc kubenswrapper[4730]: --disable-webhook \
Sep 30 09:49:36 crc kubenswrapper[4730]: --csr-acceptance-conditions="/var/run/ovnkube-identity-config/additional-cert-acceptance-cond.json" \
Sep 30 09:49:36 crc kubenswrapper[4730]: --loglevel="${LOGLEVEL}"
Sep 30 09:49:36 crc kubenswrapper[4730]: ],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LOGLEVEL,Value:4,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{52428800 0} {} 50Mi BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:env-overrides,ReadOnly:false,MountPath:/env,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovnkube-identity-cm,ReadOnly:false,MountPath:/var/run/ovnkube-identity-config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-s2kz5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000470000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod network-node-identity-vrzqb_openshift-network-node-identity(ef543e1b-8068-4ea3-b32a-61027b32e95d): CreateContainerConfigError: services have not yet been read at least once, cannot construct envvars
Sep 30 09:49:36 crc kubenswrapper[4730]: > logger="UnhandledError"
Sep 30 09:49:36 crc kubenswrapper[4730]: E0930 09:49:36.672666 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"webhook\" with CreateContainerConfigError: \"services have not yet been read at least once, cannot construct envvars\", failed to \"StartContainer\" for \"approver\" with CreateContainerConfigError: \"services have not yet been read at least once, cannot construct envvars\"]" pod="openshift-network-node-identity/network-node-identity-vrzqb" podUID="ef543e1b-8068-4ea3-b32a-61027b32e95d"
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.672884 4730 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.864387 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.864486 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.864529 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 30 09:49:36 crc kubenswrapper[4730]: E0930 09:49:36.864557 4730 
nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 09:49:37.864530565 +0000 UTC m=+22.197790558 (durationBeforeRetry 1s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 09:49:36 crc kubenswrapper[4730]: E0930 09:49:36.864633 4730 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Sep 30 09:49:36 crc kubenswrapper[4730]: E0930 09:49:36.864731 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-30 09:49:37.864719811 +0000 UTC m=+22.197979804 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Sep 30 09:49:36 crc kubenswrapper[4730]: E0930 09:49:36.864813 4730 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 30 09:49:36 crc kubenswrapper[4730]: E0930 09:49:36.864964 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-30 09:49:37.864929186 +0000 UTC m=+22.198189299 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.965042 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 09:49:36 crc kubenswrapper[4730]: I0930 09:49:36.965104 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 09:49:36 crc kubenswrapper[4730]: E0930 09:49:36.965265 4730 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 30 09:49:36 crc kubenswrapper[4730]: E0930 09:49:36.965285 4730 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 30 09:49:36 crc kubenswrapper[4730]: E0930 09:49:36.965297 4730 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 09:49:36 crc kubenswrapper[4730]: E0930 09:49:36.965311 4730 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 30 09:49:36 crc kubenswrapper[4730]: E0930 09:49:36.965356 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-09-30 09:49:37.965339704 +0000 UTC m=+22.298599697 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 09:49:36 crc kubenswrapper[4730]: E0930 09:49:36.965358 4730 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 30 09:49:36 crc kubenswrapper[4730]: E0930 09:49:36.965376 4730 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 09:49:36 crc kubenswrapper[4730]: E0930 09:49:36.965455 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-09-30 09:49:37.965432737 +0000 UTC m=+22.298692730 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 09:49:37 crc kubenswrapper[4730]: I0930 09:49:37.504484 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"c4c8c3a6bb5b3455659afaaa2cb717f4bbc9bad2177374d1d9f531a5604ea231"} Sep 30 09:49:37 crc kubenswrapper[4730]: I0930 09:49:37.507072 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"fd2dfb6c2ed4117d920c46386674aa980de6bd9c089250e66acb5eb2c2990c95"} Sep 30 09:49:37 crc kubenswrapper[4730]: I0930 09:49:37.508216 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"ce7fd1818eb4f2c0793c73f518ab160f3352bd473afc2899c3beb9af609431fd"} Sep 30 09:49:37 crc kubenswrapper[4730]: I0930 09:49:37.520039 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with 
unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 09:49:37 crc kubenswrapper[4730]: I0930 09:49:37.541410 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"services have not yet been read at least once, cannot construct envvars\\\",\\\"reason\\\":\\\"CreateContainerConfigError\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"services have not yet been read at least once, cannot construct envvars\\\",\\\"reason\\\":\\\"CreateContainerConfigError\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 09:49:37 crc kubenswrapper[4730]: I0930 09:49:37.555841 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"14260296-7de2-4c79-8afc-9472b7132a27\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac2d1a0a531df208cd612ed7da358209e05fb5a21f658ca2d1ab62720db094e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9d7ccc34d02e190d685a52bfa797834b585dfe919856b9af92419fa4e6e3c8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e42db724f492a056ad52bf50f43c9603664c4307a8f0c01d2d83d36f644c134c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5305f42ae8fc9005ec4726813a881f7b86ab167dd060238bbde4c9af5eb2c010\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://009056b5d735b1cd232be92cb1bfae18ea1e4e6f5b06185d424bba38ef57ade7\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T09:49:30Z\\\",\\\"message\\\":\\\"W0930 09:49:19.606005 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0930 
09:49:19.606415 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759225759 cert, and key in /tmp/serving-cert-1864340106/serving-signer.crt, /tmp/serving-cert-1864340106/serving-signer.key\\\\nI0930 09:49:19.940795 1 observer_polling.go:159] Starting file observer\\\\nW0930 09:49:19.943973 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0930 09:49:19.944252 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 09:49:19.946386 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1864340106/tls.crt::/tmp/serving-cert-1864340106/tls.key\\\\\\\"\\\\nF0930 09:49:30.331930 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2da24b18038fc740ead0fe99d18f91c87014548941763cf0ef15284ed5eb228\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://674fc8de60d8bca93dc0d54d5e12664780d58b8b4c5eaf314e923c2f0644e11b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://674fc8de60d8bca93dc0d54d5e12664780d58b8b4c5eaf314e923c2f0644e11b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:16Z\\\"}}\" for pod 
\"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 09:49:37 crc kubenswrapper[4730]: I0930 09:49:37.557828 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/node-resolver-s64nf"] Sep 30 09:49:37 crc kubenswrapper[4730]: I0930 09:49:37.558257 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-s64nf" Sep 30 09:49:37 crc kubenswrapper[4730]: I0930 09:49:37.561657 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Sep 30 09:49:37 crc kubenswrapper[4730]: I0930 09:49:37.563644 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Sep 30 09:49:37 crc kubenswrapper[4730]: I0930 09:49:37.563885 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Sep 30 09:49:37 crc kubenswrapper[4730]: I0930 09:49:37.569976 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 09:49:37 crc kubenswrapper[4730]: I0930 09:49:37.584871 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 09:49:37 crc kubenswrapper[4730]: I0930 09:49:37.598655 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 09:49:37 crc kubenswrapper[4730]: I0930 09:49:37.610149 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 09:49:37 crc kubenswrapper[4730]: I0930 09:49:37.624380 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"14260296-7de2-4c79-8afc-9472b7132a27\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac2d1a0a531df208cd612ed7da358209e05fb5a21f658ca2d1ab62720db094e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9d7ccc34d02e190d685a52bfa797834b585dfe919856b9af92419fa4e6e3c8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e42db724f492a056ad52bf50f43c9603664c4307a8f0c01d2d83d36f644c134c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5305f42ae8fc9005ec4726813a881f7b86ab167dd060238bbde4c9af5eb2c010\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://009056b5d735b1cd232be92cb1bfae18ea1e4e6f5b06185d424bba38ef57ade7\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T09:49:30Z\\\",\\\"message\\\":\\\"W0930 09:49:19.606005 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0930 
09:49:19.606415 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759225759 cert, and key in /tmp/serving-cert-1864340106/serving-signer.crt, /tmp/serving-cert-1864340106/serving-signer.key\\\\nI0930 09:49:19.940795 1 observer_polling.go:159] Starting file observer\\\\nW0930 09:49:19.943973 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0930 09:49:19.944252 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 09:49:19.946386 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1864340106/tls.crt::/tmp/serving-cert-1864340106/tls.key\\\\\\\"\\\\nF0930 09:49:30.331930 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2da24b18038fc740ead0fe99d18f91c87014548941763cf0ef15284ed5eb228\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://674fc8de60d8bca93dc0d54d5e12664780d58b8b4c5eaf314e923c2f0644e11b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://674fc8de60d8bca93dc0d54d5e12664780d58b8b4c5eaf314e923c2f0644e11b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:16Z\\\"}}\" for pod 
\"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 09:49:37 crc kubenswrapper[4730]: I0930 09:49:37.638402 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"services have not yet been read at least once, cannot construct envvars\\\",\\\"reason\\\":\\\"CreateContainerConfigError\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 09:49:37 crc kubenswrapper[4730]: I0930 09:49:37.653208 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers 
with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 09:49:37 crc kubenswrapper[4730]: I0930 09:49:37.675920 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6rknv\" (UniqueName: \"kubernetes.io/projected/1a7e6b85-ac68-4da9-b7eb-b5a936f639df-kube-api-access-6rknv\") pod \"node-resolver-s64nf\" (UID: \"1a7e6b85-ac68-4da9-b7eb-b5a936f639df\") " pod="openshift-dns/node-resolver-s64nf" Sep 30 09:49:37 crc kubenswrapper[4730]: I0930 09:49:37.675979 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/1a7e6b85-ac68-4da9-b7eb-b5a936f639df-hosts-file\") pod \"node-resolver-s64nf\" (UID: \"1a7e6b85-ac68-4da9-b7eb-b5a936f639df\") " pod="openshift-dns/node-resolver-s64nf" Sep 30 09:49:37 crc kubenswrapper[4730]: I0930 09:49:37.675936 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 09:49:37 crc kubenswrapper[4730]: I0930 09:49:37.691352 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 09:49:37 crc kubenswrapper[4730]: I0930 09:49:37.713578 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 09:49:37 crc kubenswrapper[4730]: I0930 09:49:37.746160 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"services have not yet been read at least once, cannot construct envvars\\\",\\\"reason\\\":\\\"CreateContainerConfigError\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"services have not yet been read at least once, cannot construct envvars\\\",\\\"reason\\\":\\\"CreateContainerConfigError\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 09:49:37 crc kubenswrapper[4730]: I0930 09:49:37.756493 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s64nf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a7e6b85-ac68-4da9-b7eb-b5a936f639df\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6rknv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s64nf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 09:49:37 crc kubenswrapper[4730]: I0930 09:49:37.777029 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6rknv\" (UniqueName: \"kubernetes.io/projected/1a7e6b85-ac68-4da9-b7eb-b5a936f639df-kube-api-access-6rknv\") pod \"node-resolver-s64nf\" (UID: \"1a7e6b85-ac68-4da9-b7eb-b5a936f639df\") " pod="openshift-dns/node-resolver-s64nf" Sep 30 09:49:37 crc kubenswrapper[4730]: I0930 09:49:37.777080 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/1a7e6b85-ac68-4da9-b7eb-b5a936f639df-hosts-file\") pod \"node-resolver-s64nf\" (UID: \"1a7e6b85-ac68-4da9-b7eb-b5a936f639df\") " pod="openshift-dns/node-resolver-s64nf" Sep 30 09:49:37 crc kubenswrapper[4730]: I0930 09:49:37.777163 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/1a7e6b85-ac68-4da9-b7eb-b5a936f639df-hosts-file\") pod \"node-resolver-s64nf\" (UID: \"1a7e6b85-ac68-4da9-b7eb-b5a936f639df\") " pod="openshift-dns/node-resolver-s64nf" Sep 30 09:49:37 crc kubenswrapper[4730]: I0930 09:49:37.795231 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6rknv\" (UniqueName: \"kubernetes.io/projected/1a7e6b85-ac68-4da9-b7eb-b5a936f639df-kube-api-access-6rknv\") pod \"node-resolver-s64nf\" (UID: \"1a7e6b85-ac68-4da9-b7eb-b5a936f639df\") " pod="openshift-dns/node-resolver-s64nf" Sep 30 09:49:37 crc kubenswrapper[4730]: I0930 09:49:37.878065 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 09:49:37 crc kubenswrapper[4730]: I0930 09:49:37.878168 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 09:49:37 crc kubenswrapper[4730]: I0930 09:49:37.878233 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 09:49:37 crc kubenswrapper[4730]: E0930 09:49:37.878340 4730 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 30 09:49:37 crc kubenswrapper[4730]: E0930 09:49:37.878410 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. 
No retries permitted until 2025-09-30 09:49:39.878393903 +0000 UTC m=+24.211653896 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 30 09:49:37 crc kubenswrapper[4730]: E0930 09:49:37.878525 4730 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Sep 30 09:49:37 crc kubenswrapper[4730]: E0930 09:49:37.878669 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-30 09:49:39.87865875 +0000 UTC m=+24.211918743 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Sep 30 09:49:37 crc kubenswrapper[4730]: E0930 09:49:37.878802 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 09:49:39.878766623 +0000 UTC m=+24.212026636 (durationBeforeRetry 2s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 09:49:37 crc kubenswrapper[4730]: I0930 09:49:37.891486 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-s64nf" Sep 30 09:49:37 crc kubenswrapper[4730]: W0930 09:49:37.902021 4730 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1a7e6b85_ac68_4da9_b7eb_b5a936f639df.slice/crio-507d6cecce487bebc8013ea0f9674bf242d7931459f6310c1f4ef4f23faa55d5 WatchSource:0}: Error finding container 507d6cecce487bebc8013ea0f9674bf242d7931459f6310c1f4ef4f23faa55d5: Status 404 returned error can't find the container with id 507d6cecce487bebc8013ea0f9674bf242d7931459f6310c1f4ef4f23faa55d5 Sep 30 09:49:37 crc kubenswrapper[4730]: I0930 09:49:37.950646 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-d4zf9"] Sep 30 09:49:37 crc kubenswrapper[4730]: I0930 09:49:37.951073 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" Sep 30 09:49:37 crc kubenswrapper[4730]: I0930 09:49:37.954281 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Sep 30 09:49:37 crc kubenswrapper[4730]: I0930 09:49:37.954368 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Sep 30 09:49:37 crc kubenswrapper[4730]: I0930 09:49:37.954892 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-c5vmh"] Sep 30 09:49:37 crc kubenswrapper[4730]: I0930 09:49:37.954946 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Sep 30 09:49:37 crc kubenswrapper[4730]: I0930 09:49:37.955657 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-t2frc"] Sep 30 09:49:37 crc kubenswrapper[4730]: I0930 09:49:37.955821 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-p4xvk"] Sep 30 09:49:37 crc kubenswrapper[4730]: I0930 09:49:37.955982 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-t2frc" Sep 30 09:49:37 crc kubenswrapper[4730]: I0930 09:49:37.956621 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" Sep 30 09:49:37 crc kubenswrapper[4730]: I0930 09:49:37.956891 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Sep 30 09:49:37 crc kubenswrapper[4730]: I0930 09:49:37.961370 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-p4xvk" Sep 30 09:49:37 crc kubenswrapper[4730]: I0930 09:49:37.963785 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Sep 30 09:49:37 crc kubenswrapper[4730]: I0930 09:49:37.964278 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Sep 30 09:49:37 crc kubenswrapper[4730]: I0930 09:49:37.964648 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Sep 30 09:49:37 crc kubenswrapper[4730]: I0930 09:49:37.969577 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Sep 30 09:49:37 crc kubenswrapper[4730]: I0930 09:49:37.969691 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Sep 30 09:49:37 crc kubenswrapper[4730]: I0930 09:49:37.969784 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Sep 30 09:49:37 crc kubenswrapper[4730]: I0930 09:49:37.969985 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Sep 30 09:49:37 crc kubenswrapper[4730]: I0930 09:49:37.970083 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Sep 30 09:49:37 crc kubenswrapper[4730]: I0930 09:49:37.970152 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Sep 30 09:49:37 crc kubenswrapper[4730]: I0930 09:49:37.970272 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Sep 30 09:49:37 crc kubenswrapper[4730]: I0930 09:49:37.970418 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Sep 30 09:49:37 crc kubenswrapper[4730]: I0930 09:49:37.970550 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Sep 30 09:49:37 crc kubenswrapper[4730]: I0930 09:49:37.972006 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Sep 30 09:49:37 crc kubenswrapper[4730]: I0930 09:49:37.973093 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Sep 30 09:49:37 crc kubenswrapper[4730]: I0930 09:49:37.975227 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Sep 30 09:49:37 crc kubenswrapper[4730]: I0930 09:49:37.979300 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"services have not yet been read at least once, cannot construct envvars\\\",\\\"reason\\\":\\\"CreateContainerConfigError\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 09:49:37 crc kubenswrapper[4730]: I0930 09:49:37.979934 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 09:49:37 crc kubenswrapper[4730]: I0930 09:49:37.980082 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 09:49:37 crc kubenswrapper[4730]: E0930 09:49:37.980320 4730 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 30 09:49:37 crc kubenswrapper[4730]: E0930 09:49:37.980416 4730 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 30 09:49:37 crc kubenswrapper[4730]: E0930 09:49:37.980495 4730 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 09:49:37 
crc kubenswrapper[4730]: E0930 09:49:37.980673 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-09-30 09:49:39.98060019 +0000 UTC m=+24.313860183 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 09:49:37 crc kubenswrapper[4730]: E0930 09:49:37.981155 4730 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 30 09:49:37 crc kubenswrapper[4730]: E0930 09:49:37.981256 4730 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 30 09:49:37 crc kubenswrapper[4730]: E0930 09:49:37.981334 4730 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 09:49:37 crc kubenswrapper[4730]: E0930 09:49:37.981812 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-09-30 09:49:39.981785302 +0000 UTC m=+24.315045295 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 09:49:37 crc kubenswrapper[4730]: I0930 09:49:37.996387 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.014479 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.040990 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.061974 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"95bd4436-8399-478d-9552-c9ba5ae8f327\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9d975\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9d975\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d4zf9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.078688 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"14260296-7de2-4c79-8afc-9472b7132a27\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac2d1a0a531df208cd612ed7da358209e05fb5a21f658ca2d1ab62720db094e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9d7ccc34d02e190d685a52bfa797834b585dfe919856b9af92419fa4e6e3c8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e42db724f492a056ad52bf50f43c9603664c4307a8f0c01d2d83d36f644c134c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5305f42ae8fc9005ec4726813a881f7b86ab167dd060238bbde4c9af5eb2c010\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://009056b5d735b1cd232be92cb1bfae18ea1e4e6f5b06185d424bba38ef57ade7\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T09:49:30Z\\\",\\\"message\\\":\\\"W0930 09:49:19.606005 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0930 
09:49:19.606415 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759225759 cert, and key in /tmp/serving-cert-1864340106/serving-signer.crt, /tmp/serving-cert-1864340106/serving-signer.key\\\\nI0930 09:49:19.940795 1 observer_polling.go:159] Starting file observer\\\\nW0930 09:49:19.943973 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0930 09:49:19.944252 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 09:49:19.946386 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1864340106/tls.crt::/tmp/serving-cert-1864340106/tls.key\\\\\\\"\\\\nF0930 09:49:30.331930 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2da24b18038fc740ead0fe99d18f91c87014548941763cf0ef15284ed5eb228\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://674fc8de60d8bca93dc0d54d5e12664780d58b8b4c5eaf314e923c2f0644e11b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://674fc8de60d8bca93dc0d54d5e12664780d58b8b4c5eaf314e923c2f0644e11b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:16Z\\\"}}\" for pod 
\"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.081038 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9d975\" (UniqueName: \"kubernetes.io/projected/95bd4436-8399-478d-9552-c9ba5ae8f327-kube-api-access-9d975\") pod \"machine-config-daemon-d4zf9\" (UID: \"95bd4436-8399-478d-9552-c9ba5ae8f327\") " pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.081199 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/823c4c28-801d-421e-b15f-02a17e300753-systemd-units\") pod \"ovnkube-node-c5vmh\" (UID: \"823c4c28-801d-421e-b15f-02a17e300753\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.081306 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/823c4c28-801d-421e-b15f-02a17e300753-etc-openvswitch\") pod \"ovnkube-node-c5vmh\" (UID: \"823c4c28-801d-421e-b15f-02a17e300753\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.081380 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/823c4c28-801d-421e-b15f-02a17e300753-host-cni-bin\") pod \"ovnkube-node-c5vmh\" (UID: \"823c4c28-801d-421e-b15f-02a17e300753\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.081486 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/98a6f8df-1ac8-4652-8074-90cb180311ad-hostroot\") pod \"multus-t2frc\" (UID: \"98a6f8df-1ac8-4652-8074-90cb180311ad\") " pod="openshift-multus/multus-t2frc" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.081582 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/98a6f8df-1ac8-4652-8074-90cb180311ad-host-run-multus-certs\") pod \"multus-t2frc\" (UID: \"98a6f8df-1ac8-4652-8074-90cb180311ad\") " pod="openshift-multus/multus-t2frc" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.081710 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/9871bed2-69f9-44f1-ab80-f8b4b9241e73-system-cni-dir\") pod \"multus-additional-cni-plugins-p4xvk\" (UID: \"9871bed2-69f9-44f1-ab80-f8b4b9241e73\") " pod="openshift-multus/multus-additional-cni-plugins-p4xvk" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.081810 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jfshb\" (UniqueName: \"kubernetes.io/projected/823c4c28-801d-421e-b15f-02a17e300753-kube-api-access-jfshb\") pod \"ovnkube-node-c5vmh\" (UID: \"823c4c28-801d-421e-b15f-02a17e300753\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" Sep 30 09:49:38 crc 
kubenswrapper[4730]: I0930 09:49:38.081923 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/823c4c28-801d-421e-b15f-02a17e300753-env-overrides\") pod \"ovnkube-node-c5vmh\" (UID: \"823c4c28-801d-421e-b15f-02a17e300753\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.082051 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/823c4c28-801d-421e-b15f-02a17e300753-ovnkube-script-lib\") pod \"ovnkube-node-c5vmh\" (UID: \"823c4c28-801d-421e-b15f-02a17e300753\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.082141 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/823c4c28-801d-421e-b15f-02a17e300753-run-openvswitch\") pod \"ovnkube-node-c5vmh\" (UID: \"823c4c28-801d-421e-b15f-02a17e300753\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.082232 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/823c4c28-801d-421e-b15f-02a17e300753-ovn-node-metrics-cert\") pod \"ovnkube-node-c5vmh\" (UID: \"823c4c28-801d-421e-b15f-02a17e300753\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.082335 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/98a6f8df-1ac8-4652-8074-90cb180311ad-host-var-lib-cni-bin\") pod \"multus-t2frc\" (UID: \"98a6f8df-1ac8-4652-8074-90cb180311ad\") " pod="openshift-multus/multus-t2frc" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.082442 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/95bd4436-8399-478d-9552-c9ba5ae8f327-mcd-auth-proxy-config\") pod \"machine-config-daemon-d4zf9\" (UID: \"95bd4436-8399-478d-9552-c9ba5ae8f327\") " pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.082580 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/98a6f8df-1ac8-4652-8074-90cb180311ad-multus-cni-dir\") pod \"multus-t2frc\" (UID: \"98a6f8df-1ac8-4652-8074-90cb180311ad\") " pod="openshift-multus/multus-t2frc" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.082741 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/98a6f8df-1ac8-4652-8074-90cb180311ad-os-release\") pod \"multus-t2frc\" (UID: \"98a6f8df-1ac8-4652-8074-90cb180311ad\") " pod="openshift-multus/multus-t2frc" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.082859 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/98a6f8df-1ac8-4652-8074-90cb180311ad-etc-kubernetes\") pod \"multus-t2frc\" (UID: \"98a6f8df-1ac8-4652-8074-90cb180311ad\") " 
pod="openshift-multus/multus-t2frc" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.082986 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/9871bed2-69f9-44f1-ab80-f8b4b9241e73-cnibin\") pod \"multus-additional-cni-plugins-p4xvk\" (UID: \"9871bed2-69f9-44f1-ab80-f8b4b9241e73\") " pod="openshift-multus/multus-additional-cni-plugins-p4xvk" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.083105 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/98a6f8df-1ac8-4652-8074-90cb180311ad-cni-binary-copy\") pod \"multus-t2frc\" (UID: \"98a6f8df-1ac8-4652-8074-90cb180311ad\") " pod="openshift-multus/multus-t2frc" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.083236 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/98a6f8df-1ac8-4652-8074-90cb180311ad-host-run-k8s-cni-cncf-io\") pod \"multus-t2frc\" (UID: \"98a6f8df-1ac8-4652-8074-90cb180311ad\") " pod="openshift-multus/multus-t2frc" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.083365 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/823c4c28-801d-421e-b15f-02a17e300753-host-run-ovn-kubernetes\") pod \"ovnkube-node-c5vmh\" (UID: \"823c4c28-801d-421e-b15f-02a17e300753\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.083505 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/823c4c28-801d-421e-b15f-02a17e300753-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-c5vmh\" (UID: \"823c4c28-801d-421e-b15f-02a17e300753\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.083653 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-txcfr\" (UniqueName: \"kubernetes.io/projected/98a6f8df-1ac8-4652-8074-90cb180311ad-kube-api-access-txcfr\") pod \"multus-t2frc\" (UID: \"98a6f8df-1ac8-4652-8074-90cb180311ad\") " pod="openshift-multus/multus-t2frc" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.083789 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/95bd4436-8399-478d-9552-c9ba5ae8f327-rootfs\") pod \"machine-config-daemon-d4zf9\" (UID: \"95bd4436-8399-478d-9552-c9ba5ae8f327\") " pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.083915 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/823c4c28-801d-421e-b15f-02a17e300753-log-socket\") pod \"ovnkube-node-c5vmh\" (UID: \"823c4c28-801d-421e-b15f-02a17e300753\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.084054 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: 
\"kubernetes.io/configmap/98a6f8df-1ac8-4652-8074-90cb180311ad-multus-daemon-config\") pod \"multus-t2frc\" (UID: \"98a6f8df-1ac8-4652-8074-90cb180311ad\") " pod="openshift-multus/multus-t2frc" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.084202 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/9871bed2-69f9-44f1-ab80-f8b4b9241e73-tuning-conf-dir\") pod \"multus-additional-cni-plugins-p4xvk\" (UID: \"9871bed2-69f9-44f1-ab80-f8b4b9241e73\") " pod="openshift-multus/multus-additional-cni-plugins-p4xvk" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.084338 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/9871bed2-69f9-44f1-ab80-f8b4b9241e73-os-release\") pod \"multus-additional-cni-plugins-p4xvk\" (UID: \"9871bed2-69f9-44f1-ab80-f8b4b9241e73\") " pod="openshift-multus/multus-additional-cni-plugins-p4xvk" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.084454 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/98a6f8df-1ac8-4652-8074-90cb180311ad-host-var-lib-cni-multus\") pod \"multus-t2frc\" (UID: \"98a6f8df-1ac8-4652-8074-90cb180311ad\") " pod="openshift-multus/multus-t2frc" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.084570 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/98a6f8df-1ac8-4652-8074-90cb180311ad-host-var-lib-kubelet\") pod \"multus-t2frc\" (UID: \"98a6f8df-1ac8-4652-8074-90cb180311ad\") " pod="openshift-multus/multus-t2frc" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.084752 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/823c4c28-801d-421e-b15f-02a17e300753-run-systemd\") pod \"ovnkube-node-c5vmh\" (UID: \"823c4c28-801d-421e-b15f-02a17e300753\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.084887 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/98a6f8df-1ac8-4652-8074-90cb180311ad-host-run-netns\") pod \"multus-t2frc\" (UID: \"98a6f8df-1ac8-4652-8074-90cb180311ad\") " pod="openshift-multus/multus-t2frc" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.085018 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/9871bed2-69f9-44f1-ab80-f8b4b9241e73-cni-binary-copy\") pod \"multus-additional-cni-plugins-p4xvk\" (UID: \"9871bed2-69f9-44f1-ab80-f8b4b9241e73\") " pod="openshift-multus/multus-additional-cni-plugins-p4xvk" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.085151 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-472bs\" (UniqueName: \"kubernetes.io/projected/9871bed2-69f9-44f1-ab80-f8b4b9241e73-kube-api-access-472bs\") pod \"multus-additional-cni-plugins-p4xvk\" (UID: \"9871bed2-69f9-44f1-ab80-f8b4b9241e73\") " pod="openshift-multus/multus-additional-cni-plugins-p4xvk" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 
09:49:38.085263 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/823c4c28-801d-421e-b15f-02a17e300753-host-kubelet\") pod \"ovnkube-node-c5vmh\" (UID: \"823c4c28-801d-421e-b15f-02a17e300753\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.085378 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/823c4c28-801d-421e-b15f-02a17e300753-host-run-netns\") pod \"ovnkube-node-c5vmh\" (UID: \"823c4c28-801d-421e-b15f-02a17e300753\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.085490 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/823c4c28-801d-421e-b15f-02a17e300753-run-ovn\") pod \"ovnkube-node-c5vmh\" (UID: \"823c4c28-801d-421e-b15f-02a17e300753\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.085581 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/823c4c28-801d-421e-b15f-02a17e300753-node-log\") pod \"ovnkube-node-c5vmh\" (UID: \"823c4c28-801d-421e-b15f-02a17e300753\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.085705 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/9871bed2-69f9-44f1-ab80-f8b4b9241e73-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-p4xvk\" (UID: \"9871bed2-69f9-44f1-ab80-f8b4b9241e73\") " pod="openshift-multus/multus-additional-cni-plugins-p4xvk" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.085853 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/98a6f8df-1ac8-4652-8074-90cb180311ad-cnibin\") pod \"multus-t2frc\" (UID: \"98a6f8df-1ac8-4652-8074-90cb180311ad\") " pod="openshift-multus/multus-t2frc" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.085982 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/98a6f8df-1ac8-4652-8074-90cb180311ad-multus-socket-dir-parent\") pod \"multus-t2frc\" (UID: \"98a6f8df-1ac8-4652-8074-90cb180311ad\") " pod="openshift-multus/multus-t2frc" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.086122 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/98a6f8df-1ac8-4652-8074-90cb180311ad-multus-conf-dir\") pod \"multus-t2frc\" (UID: \"98a6f8df-1ac8-4652-8074-90cb180311ad\") " pod="openshift-multus/multus-t2frc" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.086243 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/98a6f8df-1ac8-4652-8074-90cb180311ad-system-cni-dir\") pod \"multus-t2frc\" (UID: \"98a6f8df-1ac8-4652-8074-90cb180311ad\") " pod="openshift-multus/multus-t2frc" Sep 30 09:49:38 crc kubenswrapper[4730]: 
I0930 09:49:38.086374 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/823c4c28-801d-421e-b15f-02a17e300753-var-lib-openvswitch\") pod \"ovnkube-node-c5vmh\" (UID: \"823c4c28-801d-421e-b15f-02a17e300753\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.086497 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/823c4c28-801d-421e-b15f-02a17e300753-host-cni-netd\") pod \"ovnkube-node-c5vmh\" (UID: \"823c4c28-801d-421e-b15f-02a17e300753\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.086655 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/823c4c28-801d-421e-b15f-02a17e300753-ovnkube-config\") pod \"ovnkube-node-c5vmh\" (UID: \"823c4c28-801d-421e-b15f-02a17e300753\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.086783 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/95bd4436-8399-478d-9552-c9ba5ae8f327-proxy-tls\") pod \"machine-config-daemon-d4zf9\" (UID: \"95bd4436-8399-478d-9552-c9ba5ae8f327\") " pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.086906 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/823c4c28-801d-421e-b15f-02a17e300753-host-slash\") pod \"ovnkube-node-c5vmh\" (UID: \"823c4c28-801d-421e-b15f-02a17e300753\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.093056 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.105365 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"services have not yet been read at least once, cannot construct envvars\\\",\\\"reason\\\":\\\"CreateContainerConfigError\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"services have not yet been read at least once, cannot construct envvars\\\",\\\"reason\\\":\\\"CreateContainerConfigError\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.118858 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s64nf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a7e6b85-ac68-4da9-b7eb-b5a936f639df\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6rknv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s64nf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.131957 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.145742 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook 
approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"services have not yet been read at least once, cannot construct envvars\\\",\\\"reason\\\":\\\"CreateContainerConfigError\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"services have not yet been read at least once, cannot construct envvars\\\",\\\"reason\\\":\\\"CreateContainerConfigError\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.159302 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-p4xvk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9871bed2-69f9-44f1-ab80-f8b4b9241e73\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with 
incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd36
7c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-p4xvk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call 
webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.171564 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"services have not yet been read at least once, cannot construct envvars\\\",\\\"reason\\\":\\\"CreateContainerConfigError\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.181522 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.188404 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9d975\" (UniqueName: \"kubernetes.io/projected/95bd4436-8399-478d-9552-c9ba5ae8f327-kube-api-access-9d975\") pod \"machine-config-daemon-d4zf9\" (UID: \"95bd4436-8399-478d-9552-c9ba5ae8f327\") " pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.188752 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/823c4c28-801d-421e-b15f-02a17e300753-systemd-units\") pod \"ovnkube-node-c5vmh\" (UID: \"823c4c28-801d-421e-b15f-02a17e300753\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.188879 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/823c4c28-801d-421e-b15f-02a17e300753-etc-openvswitch\") pod \"ovnkube-node-c5vmh\" (UID: \"823c4c28-801d-421e-b15f-02a17e300753\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.188951 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/823c4c28-801d-421e-b15f-02a17e300753-etc-openvswitch\") pod \"ovnkube-node-c5vmh\" (UID: \"823c4c28-801d-421e-b15f-02a17e300753\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.188929 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/823c4c28-801d-421e-b15f-02a17e300753-systemd-units\") pod \"ovnkube-node-c5vmh\" (UID: 
\"823c4c28-801d-421e-b15f-02a17e300753\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.189094 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/823c4c28-801d-421e-b15f-02a17e300753-host-cni-bin\") pod \"ovnkube-node-c5vmh\" (UID: \"823c4c28-801d-421e-b15f-02a17e300753\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.189209 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/98a6f8df-1ac8-4652-8074-90cb180311ad-hostroot\") pod \"multus-t2frc\" (UID: \"98a6f8df-1ac8-4652-8074-90cb180311ad\") " pod="openshift-multus/multus-t2frc" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.189349 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/98a6f8df-1ac8-4652-8074-90cb180311ad-host-run-multus-certs\") pod \"multus-t2frc\" (UID: \"98a6f8df-1ac8-4652-8074-90cb180311ad\") " pod="openshift-multus/multus-t2frc" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.189437 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/98a6f8df-1ac8-4652-8074-90cb180311ad-host-run-multus-certs\") pod \"multus-t2frc\" (UID: \"98a6f8df-1ac8-4652-8074-90cb180311ad\") " pod="openshift-multus/multus-t2frc" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.189109 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/823c4c28-801d-421e-b15f-02a17e300753-host-cni-bin\") pod \"ovnkube-node-c5vmh\" (UID: \"823c4c28-801d-421e-b15f-02a17e300753\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.189279 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/98a6f8df-1ac8-4652-8074-90cb180311ad-hostroot\") pod \"multus-t2frc\" (UID: \"98a6f8df-1ac8-4652-8074-90cb180311ad\") " pod="openshift-multus/multus-t2frc" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.189664 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/9871bed2-69f9-44f1-ab80-f8b4b9241e73-system-cni-dir\") pod \"multus-additional-cni-plugins-p4xvk\" (UID: \"9871bed2-69f9-44f1-ab80-f8b4b9241e73\") " pod="openshift-multus/multus-additional-cni-plugins-p4xvk" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.189765 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/9871bed2-69f9-44f1-ab80-f8b4b9241e73-system-cni-dir\") pod \"multus-additional-cni-plugins-p4xvk\" (UID: \"9871bed2-69f9-44f1-ab80-f8b4b9241e73\") " pod="openshift-multus/multus-additional-cni-plugins-p4xvk" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.189885 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jfshb\" (UniqueName: \"kubernetes.io/projected/823c4c28-801d-421e-b15f-02a17e300753-kube-api-access-jfshb\") pod \"ovnkube-node-c5vmh\" (UID: \"823c4c28-801d-421e-b15f-02a17e300753\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 
09:49:38.189997 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/823c4c28-801d-421e-b15f-02a17e300753-env-overrides\") pod \"ovnkube-node-c5vmh\" (UID: \"823c4c28-801d-421e-b15f-02a17e300753\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.190139 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/823c4c28-801d-421e-b15f-02a17e300753-ovnkube-script-lib\") pod \"ovnkube-node-c5vmh\" (UID: \"823c4c28-801d-421e-b15f-02a17e300753\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.190262 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/823c4c28-801d-421e-b15f-02a17e300753-run-openvswitch\") pod \"ovnkube-node-c5vmh\" (UID: \"823c4c28-801d-421e-b15f-02a17e300753\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.190379 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/823c4c28-801d-421e-b15f-02a17e300753-ovn-node-metrics-cert\") pod \"ovnkube-node-c5vmh\" (UID: \"823c4c28-801d-421e-b15f-02a17e300753\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.190495 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/98a6f8df-1ac8-4652-8074-90cb180311ad-host-var-lib-cni-bin\") pod \"multus-t2frc\" (UID: \"98a6f8df-1ac8-4652-8074-90cb180311ad\") " pod="openshift-multus/multus-t2frc" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.190597 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/98a6f8df-1ac8-4652-8074-90cb180311ad-host-var-lib-cni-bin\") pod \"multus-t2frc\" (UID: \"98a6f8df-1ac8-4652-8074-90cb180311ad\") " pod="openshift-multus/multus-t2frc" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.190752 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/95bd4436-8399-478d-9552-c9ba5ae8f327-mcd-auth-proxy-config\") pod \"machine-config-daemon-d4zf9\" (UID: \"95bd4436-8399-478d-9552-c9ba5ae8f327\") " pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.190960 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/9871bed2-69f9-44f1-ab80-f8b4b9241e73-cnibin\") pod \"multus-additional-cni-plugins-p4xvk\" (UID: \"9871bed2-69f9-44f1-ab80-f8b4b9241e73\") " pod="openshift-multus/multus-additional-cni-plugins-p4xvk" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.191081 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/98a6f8df-1ac8-4652-8074-90cb180311ad-multus-cni-dir\") pod \"multus-t2frc\" (UID: \"98a6f8df-1ac8-4652-8074-90cb180311ad\") " pod="openshift-multus/multus-t2frc" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.191179 4730 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/98a6f8df-1ac8-4652-8074-90cb180311ad-os-release\") pod \"multus-t2frc\" (UID: \"98a6f8df-1ac8-4652-8074-90cb180311ad\") " pod="openshift-multus/multus-t2frc" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.191278 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/98a6f8df-1ac8-4652-8074-90cb180311ad-etc-kubernetes\") pod \"multus-t2frc\" (UID: \"98a6f8df-1ac8-4652-8074-90cb180311ad\") " pod="openshift-multus/multus-t2frc" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.191375 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/98a6f8df-1ac8-4652-8074-90cb180311ad-etc-kubernetes\") pod \"multus-t2frc\" (UID: \"98a6f8df-1ac8-4652-8074-90cb180311ad\") " pod="openshift-multus/multus-t2frc" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.190329 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/823c4c28-801d-421e-b15f-02a17e300753-run-openvswitch\") pod \"ovnkube-node-c5vmh\" (UID: \"823c4c28-801d-421e-b15f-02a17e300753\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.191344 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/95bd4436-8399-478d-9552-c9ba5ae8f327-mcd-auth-proxy-config\") pod \"machine-config-daemon-d4zf9\" (UID: \"95bd4436-8399-478d-9552-c9ba5ae8f327\") " pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.191184 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/823c4c28-801d-421e-b15f-02a17e300753-ovnkube-script-lib\") pod \"ovnkube-node-c5vmh\" (UID: \"823c4c28-801d-421e-b15f-02a17e300753\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.191377 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/98a6f8df-1ac8-4652-8074-90cb180311ad-cni-binary-copy\") pod \"multus-t2frc\" (UID: \"98a6f8df-1ac8-4652-8074-90cb180311ad\") " pod="openshift-multus/multus-t2frc" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.191441 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/98a6f8df-1ac8-4652-8074-90cb180311ad-multus-cni-dir\") pod \"multus-t2frc\" (UID: \"98a6f8df-1ac8-4652-8074-90cb180311ad\") " pod="openshift-multus/multus-t2frc" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.191479 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/98a6f8df-1ac8-4652-8074-90cb180311ad-host-run-k8s-cni-cncf-io\") pod \"multus-t2frc\" (UID: \"98a6f8df-1ac8-4652-8074-90cb180311ad\") " pod="openshift-multus/multus-t2frc" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.191087 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/9871bed2-69f9-44f1-ab80-f8b4b9241e73-cnibin\") pod \"multus-additional-cni-plugins-p4xvk\" (UID: 
\"9871bed2-69f9-44f1-ab80-f8b4b9241e73\") " pod="openshift-multus/multus-additional-cni-plugins-p4xvk" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.191460 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/98a6f8df-1ac8-4652-8074-90cb180311ad-host-run-k8s-cni-cncf-io\") pod \"multus-t2frc\" (UID: \"98a6f8df-1ac8-4652-8074-90cb180311ad\") " pod="openshift-multus/multus-t2frc" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.191449 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/98a6f8df-1ac8-4652-8074-90cb180311ad-os-release\") pod \"multus-t2frc\" (UID: \"98a6f8df-1ac8-4652-8074-90cb180311ad\") " pod="openshift-multus/multus-t2frc" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.191519 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/823c4c28-801d-421e-b15f-02a17e300753-host-run-ovn-kubernetes\") pod \"ovnkube-node-c5vmh\" (UID: \"823c4c28-801d-421e-b15f-02a17e300753\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.191535 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/823c4c28-801d-421e-b15f-02a17e300753-host-run-ovn-kubernetes\") pod \"ovnkube-node-c5vmh\" (UID: \"823c4c28-801d-421e-b15f-02a17e300753\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.191570 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/823c4c28-801d-421e-b15f-02a17e300753-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-c5vmh\" (UID: \"823c4c28-801d-421e-b15f-02a17e300753\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.191601 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-txcfr\" (UniqueName: \"kubernetes.io/projected/98a6f8df-1ac8-4652-8074-90cb180311ad-kube-api-access-txcfr\") pod \"multus-t2frc\" (UID: \"98a6f8df-1ac8-4652-8074-90cb180311ad\") " pod="openshift-multus/multus-t2frc" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.191639 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/9871bed2-69f9-44f1-ab80-f8b4b9241e73-tuning-conf-dir\") pod \"multus-additional-cni-plugins-p4xvk\" (UID: \"9871bed2-69f9-44f1-ab80-f8b4b9241e73\") " pod="openshift-multus/multus-additional-cni-plugins-p4xvk" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.191660 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/95bd4436-8399-478d-9552-c9ba5ae8f327-rootfs\") pod \"machine-config-daemon-d4zf9\" (UID: \"95bd4436-8399-478d-9552-c9ba5ae8f327\") " pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.191682 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/823c4c28-801d-421e-b15f-02a17e300753-log-socket\") pod \"ovnkube-node-c5vmh\" (UID: 
\"823c4c28-801d-421e-b15f-02a17e300753\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.191699 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/98a6f8df-1ac8-4652-8074-90cb180311ad-multus-daemon-config\") pod \"multus-t2frc\" (UID: \"98a6f8df-1ac8-4652-8074-90cb180311ad\") " pod="openshift-multus/multus-t2frc" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.191729 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/9871bed2-69f9-44f1-ab80-f8b4b9241e73-os-release\") pod \"multus-additional-cni-plugins-p4xvk\" (UID: \"9871bed2-69f9-44f1-ab80-f8b4b9241e73\") " pod="openshift-multus/multus-additional-cni-plugins-p4xvk" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.191746 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/98a6f8df-1ac8-4652-8074-90cb180311ad-host-var-lib-cni-multus\") pod \"multus-t2frc\" (UID: \"98a6f8df-1ac8-4652-8074-90cb180311ad\") " pod="openshift-multus/multus-t2frc" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.191765 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/823c4c28-801d-421e-b15f-02a17e300753-run-systemd\") pod \"ovnkube-node-c5vmh\" (UID: \"823c4c28-801d-421e-b15f-02a17e300753\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.191781 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/98a6f8df-1ac8-4652-8074-90cb180311ad-host-run-netns\") pod \"multus-t2frc\" (UID: \"98a6f8df-1ac8-4652-8074-90cb180311ad\") " pod="openshift-multus/multus-t2frc" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.191799 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/98a6f8df-1ac8-4652-8074-90cb180311ad-host-var-lib-kubelet\") pod \"multus-t2frc\" (UID: \"98a6f8df-1ac8-4652-8074-90cb180311ad\") " pod="openshift-multus/multus-t2frc" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.191804 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/95bd4436-8399-478d-9552-c9ba5ae8f327-rootfs\") pod \"machine-config-daemon-d4zf9\" (UID: \"95bd4436-8399-478d-9552-c9ba5ae8f327\") " pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.191820 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/9871bed2-69f9-44f1-ab80-f8b4b9241e73-cni-binary-copy\") pod \"multus-additional-cni-plugins-p4xvk\" (UID: \"9871bed2-69f9-44f1-ab80-f8b4b9241e73\") " pod="openshift-multus/multus-additional-cni-plugins-p4xvk" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.191840 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/9871bed2-69f9-44f1-ab80-f8b4b9241e73-os-release\") pod \"multus-additional-cni-plugins-p4xvk\" (UID: \"9871bed2-69f9-44f1-ab80-f8b4b9241e73\") " 
pod="openshift-multus/multus-additional-cni-plugins-p4xvk" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.191845 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-472bs\" (UniqueName: \"kubernetes.io/projected/9871bed2-69f9-44f1-ab80-f8b4b9241e73-kube-api-access-472bs\") pod \"multus-additional-cni-plugins-p4xvk\" (UID: \"9871bed2-69f9-44f1-ab80-f8b4b9241e73\") " pod="openshift-multus/multus-additional-cni-plugins-p4xvk" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.191897 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/823c4c28-801d-421e-b15f-02a17e300753-run-systemd\") pod \"ovnkube-node-c5vmh\" (UID: \"823c4c28-801d-421e-b15f-02a17e300753\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.191910 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/823c4c28-801d-421e-b15f-02a17e300753-env-overrides\") pod \"ovnkube-node-c5vmh\" (UID: \"823c4c28-801d-421e-b15f-02a17e300753\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.191918 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/823c4c28-801d-421e-b15f-02a17e300753-host-kubelet\") pod \"ovnkube-node-c5vmh\" (UID: \"823c4c28-801d-421e-b15f-02a17e300753\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.191927 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/98a6f8df-1ac8-4652-8074-90cb180311ad-host-run-netns\") pod \"multus-t2frc\" (UID: \"98a6f8df-1ac8-4652-8074-90cb180311ad\") " pod="openshift-multus/multus-t2frc" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.191950 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/823c4c28-801d-421e-b15f-02a17e300753-host-run-netns\") pod \"ovnkube-node-c5vmh\" (UID: \"823c4c28-801d-421e-b15f-02a17e300753\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.191978 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/823c4c28-801d-421e-b15f-02a17e300753-run-ovn\") pod \"ovnkube-node-c5vmh\" (UID: \"823c4c28-801d-421e-b15f-02a17e300753\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.192000 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/823c4c28-801d-421e-b15f-02a17e300753-node-log\") pod \"ovnkube-node-c5vmh\" (UID: \"823c4c28-801d-421e-b15f-02a17e300753\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.192040 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/98a6f8df-1ac8-4652-8074-90cb180311ad-host-var-lib-kubelet\") pod \"multus-t2frc\" (UID: \"98a6f8df-1ac8-4652-8074-90cb180311ad\") " pod="openshift-multus/multus-t2frc" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.192094 4730 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/823c4c28-801d-421e-b15f-02a17e300753-host-kubelet\") pod \"ovnkube-node-c5vmh\" (UID: \"823c4c28-801d-421e-b15f-02a17e300753\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.192047 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/9871bed2-69f9-44f1-ab80-f8b4b9241e73-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-p4xvk\" (UID: \"9871bed2-69f9-44f1-ab80-f8b4b9241e73\") " pod="openshift-multus/multus-additional-cni-plugins-p4xvk" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.191874 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/98a6f8df-1ac8-4652-8074-90cb180311ad-host-var-lib-cni-multus\") pod \"multus-t2frc\" (UID: \"98a6f8df-1ac8-4652-8074-90cb180311ad\") " pod="openshift-multus/multus-t2frc" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.192183 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/823c4c28-801d-421e-b15f-02a17e300753-run-ovn\") pod \"ovnkube-node-c5vmh\" (UID: \"823c4c28-801d-421e-b15f-02a17e300753\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.192184 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/98a6f8df-1ac8-4652-8074-90cb180311ad-cnibin\") pod \"multus-t2frc\" (UID: \"98a6f8df-1ac8-4652-8074-90cb180311ad\") " pod="openshift-multus/multus-t2frc" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.192231 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/98a6f8df-1ac8-4652-8074-90cb180311ad-cnibin\") pod \"multus-t2frc\" (UID: \"98a6f8df-1ac8-4652-8074-90cb180311ad\") " pod="openshift-multus/multus-t2frc" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.192235 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/98a6f8df-1ac8-4652-8074-90cb180311ad-system-cni-dir\") pod \"multus-t2frc\" (UID: \"98a6f8df-1ac8-4652-8074-90cb180311ad\") " pod="openshift-multus/multus-t2frc" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.192283 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/823c4c28-801d-421e-b15f-02a17e300753-host-run-netns\") pod \"ovnkube-node-c5vmh\" (UID: \"823c4c28-801d-421e-b15f-02a17e300753\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.192323 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/823c4c28-801d-421e-b15f-02a17e300753-node-log\") pod \"ovnkube-node-c5vmh\" (UID: \"823c4c28-801d-421e-b15f-02a17e300753\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.192326 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/98a6f8df-1ac8-4652-8074-90cb180311ad-multus-socket-dir-parent\") pod \"multus-t2frc\" (UID: 
\"98a6f8df-1ac8-4652-8074-90cb180311ad\") " pod="openshift-multus/multus-t2frc" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.192352 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/98a6f8df-1ac8-4652-8074-90cb180311ad-multus-conf-dir\") pod \"multus-t2frc\" (UID: \"98a6f8df-1ac8-4652-8074-90cb180311ad\") " pod="openshift-multus/multus-t2frc" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.192362 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/98a6f8df-1ac8-4652-8074-90cb180311ad-multus-socket-dir-parent\") pod \"multus-t2frc\" (UID: \"98a6f8df-1ac8-4652-8074-90cb180311ad\") " pod="openshift-multus/multus-t2frc" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.192376 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/95bd4436-8399-478d-9552-c9ba5ae8f327-proxy-tls\") pod \"machine-config-daemon-d4zf9\" (UID: \"95bd4436-8399-478d-9552-c9ba5ae8f327\") " pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.192390 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/98a6f8df-1ac8-4652-8074-90cb180311ad-multus-conf-dir\") pod \"multus-t2frc\" (UID: \"98a6f8df-1ac8-4652-8074-90cb180311ad\") " pod="openshift-multus/multus-t2frc" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.192398 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/823c4c28-801d-421e-b15f-02a17e300753-host-slash\") pod \"ovnkube-node-c5vmh\" (UID: \"823c4c28-801d-421e-b15f-02a17e300753\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.192421 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/823c4c28-801d-421e-b15f-02a17e300753-var-lib-openvswitch\") pod \"ovnkube-node-c5vmh\" (UID: \"823c4c28-801d-421e-b15f-02a17e300753\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.192441 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/823c4c28-801d-421e-b15f-02a17e300753-host-cni-netd\") pod \"ovnkube-node-c5vmh\" (UID: \"823c4c28-801d-421e-b15f-02a17e300753\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.192460 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/823c4c28-801d-421e-b15f-02a17e300753-ovnkube-config\") pod \"ovnkube-node-c5vmh\" (UID: \"823c4c28-801d-421e-b15f-02a17e300753\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.192499 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/9871bed2-69f9-44f1-ab80-f8b4b9241e73-cni-binary-copy\") pod \"multus-additional-cni-plugins-p4xvk\" (UID: \"9871bed2-69f9-44f1-ab80-f8b4b9241e73\") " pod="openshift-multus/multus-additional-cni-plugins-p4xvk" Sep 30 09:49:38 crc 
kubenswrapper[4730]: I0930 09:49:38.192544 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/823c4c28-801d-421e-b15f-02a17e300753-var-lib-openvswitch\") pod \"ovnkube-node-c5vmh\" (UID: \"823c4c28-801d-421e-b15f-02a17e300753\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.192571 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/823c4c28-801d-421e-b15f-02a17e300753-host-slash\") pod \"ovnkube-node-c5vmh\" (UID: \"823c4c28-801d-421e-b15f-02a17e300753\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.192328 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/98a6f8df-1ac8-4652-8074-90cb180311ad-system-cni-dir\") pod \"multus-t2frc\" (UID: \"98a6f8df-1ac8-4652-8074-90cb180311ad\") " pod="openshift-multus/multus-t2frc" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.191989 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/823c4c28-801d-421e-b15f-02a17e300753-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-c5vmh\" (UID: \"823c4c28-801d-421e-b15f-02a17e300753\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.192654 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/823c4c28-801d-421e-b15f-02a17e300753-host-cni-netd\") pod \"ovnkube-node-c5vmh\" (UID: \"823c4c28-801d-421e-b15f-02a17e300753\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.192798 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/9871bed2-69f9-44f1-ab80-f8b4b9241e73-tuning-conf-dir\") pod \"multus-additional-cni-plugins-p4xvk\" (UID: \"9871bed2-69f9-44f1-ab80-f8b4b9241e73\") " pod="openshift-multus/multus-additional-cni-plugins-p4xvk" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.192905 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/98a6f8df-1ac8-4652-8074-90cb180311ad-multus-daemon-config\") pod \"multus-t2frc\" (UID: \"98a6f8df-1ac8-4652-8074-90cb180311ad\") " pod="openshift-multus/multus-t2frc" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.192955 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/9871bed2-69f9-44f1-ab80-f8b4b9241e73-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-p4xvk\" (UID: \"9871bed2-69f9-44f1-ab80-f8b4b9241e73\") " pod="openshift-multus/multus-additional-cni-plugins-p4xvk" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.192981 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/823c4c28-801d-421e-b15f-02a17e300753-log-socket\") pod \"ovnkube-node-c5vmh\" (UID: \"823c4c28-801d-421e-b15f-02a17e300753\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.193021 4730 status_manager.go:875] "Failed to update 
status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"95bd4436-8399-478d-9552-c9ba5ae8f327\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9d975\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9d975\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d4zf9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.193178 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/823c4c28-801d-421e-b15f-02a17e300753-ovnkube-config\") pod \"ovnkube-node-c5vmh\" (UID: 
\"823c4c28-801d-421e-b15f-02a17e300753\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.194160 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/823c4c28-801d-421e-b15f-02a17e300753-ovn-node-metrics-cert\") pod \"ovnkube-node-c5vmh\" (UID: \"823c4c28-801d-421e-b15f-02a17e300753\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.194547 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/98a6f8df-1ac8-4652-8074-90cb180311ad-cni-binary-copy\") pod \"multus-t2frc\" (UID: \"98a6f8df-1ac8-4652-8074-90cb180311ad\") " pod="openshift-multus/multus-t2frc" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.196267 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/95bd4436-8399-478d-9552-c9ba5ae8f327-proxy-tls\") pod \"machine-config-daemon-d4zf9\" (UID: \"95bd4436-8399-478d-9552-c9ba5ae8f327\") " pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.206166 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jfshb\" (UniqueName: \"kubernetes.io/projected/823c4c28-801d-421e-b15f-02a17e300753-kube-api-access-jfshb\") pod \"ovnkube-node-c5vmh\" (UID: \"823c4c28-801d-421e-b15f-02a17e300753\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.207880 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-472bs\" (UniqueName: \"kubernetes.io/projected/9871bed2-69f9-44f1-ab80-f8b4b9241e73-kube-api-access-472bs\") pod \"multus-additional-cni-plugins-p4xvk\" (UID: \"9871bed2-69f9-44f1-ab80-f8b4b9241e73\") " pod="openshift-multus/multus-additional-cni-plugins-p4xvk" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.208358 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"14260296-7de2-4c79-8afc-9472b7132a27\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac2d1a0a531df208cd612ed7da358209e05fb5a21f658ca2d1ab62720db094e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9d7ccc34d02e190d685a52bfa797834b585dfe919856b9af92419fa4e6e3c8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e42db724f492a056ad52bf50f43c9603664c4307a8f0c01d2d83d36f644c134c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5305f42ae8fc9005ec4726813a881f7b86ab167dd060238bbde4c9af5eb2c010\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://009056b5d735b1cd232be92cb1bfae18ea1e4e6f5b06185d424bba38ef57ade7\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T09:49:30Z\\\",\\\"message\\\":\\\"W0930 09:49:19.606005 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0930 
09:49:19.606415 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759225759 cert, and key in /tmp/serving-cert-1864340106/serving-signer.crt, /tmp/serving-cert-1864340106/serving-signer.key\\\\nI0930 09:49:19.940795 1 observer_polling.go:159] Starting file observer\\\\nW0930 09:49:19.943973 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0930 09:49:19.944252 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 09:49:19.946386 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1864340106/tls.crt::/tmp/serving-cert-1864340106/tls.key\\\\\\\"\\\\nF0930 09:49:30.331930 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2da24b18038fc740ead0fe99d18f91c87014548941763cf0ef15284ed5eb228\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://674fc8de60d8bca93dc0d54d5e12664780d58b8b4c5eaf314e923c2f0644e11b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://674fc8de60d8bca93dc0d54d5e12664780d58b8b4c5eaf314e923c2f0644e11b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:16Z\\\"}}\" for pod 
\"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.209133 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9d975\" (UniqueName: \"kubernetes.io/projected/95bd4436-8399-478d-9552-c9ba5ae8f327-kube-api-access-9d975\") pod \"machine-config-daemon-d4zf9\" (UID: \"95bd4436-8399-478d-9552-c9ba5ae8f327\") " pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.209157 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-txcfr\" (UniqueName: \"kubernetes.io/projected/98a6f8df-1ac8-4652-8074-90cb180311ad-kube-api-access-txcfr\") pod \"multus-t2frc\" (UID: \"98a6f8df-1ac8-4652-8074-90cb180311ad\") " pod="openshift-multus/multus-t2frc" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.219551 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s64nf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a7e6b85-ac68-4da9-b7eb-b5a936f639df\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6rknv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s64nf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial 
tcp 127.0.0.1:9743: connect: connection refused" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.230704 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.244645 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-t2frc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"98a6f8df-1ac8-4652-8074-90cb180311ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-txcfr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-t2frc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.272604 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"823c4c28-801d-421e-b15f-02a17e300753\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\
\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art
-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\
\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-c5vmh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.274759 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.284255 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-t2frc" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.286186 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.302656 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.311975 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-p4xvk" Sep 30 09:49:38 crc kubenswrapper[4730]: W0930 09:49:38.319502 4730 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod823c4c28_801d_421e_b15f_02a17e300753.slice/crio-b495b3733f13e0b9f0b0ad95249d30857e31a0f6e908b5ff564a45f562b85018 WatchSource:0}: Error finding container b495b3733f13e0b9f0b0ad95249d30857e31a0f6e908b5ff564a45f562b85018: Status 404 returned error can't find the container with id b495b3733f13e0b9f0b0ad95249d30857e31a0f6e908b5ff564a45f562b85018 Sep 30 09:49:38 crc kubenswrapper[4730]: W0930 09:49:38.334507 4730 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9871bed2_69f9_44f1_ab80_f8b4b9241e73.slice/crio-134cc080ebfb94d3e115bcb7d1da20f92db546031a11d0a19967da1e50011e6d WatchSource:0}: Error finding container 134cc080ebfb94d3e115bcb7d1da20f92db546031a11d0a19967da1e50011e6d: Status 404 returned error can't find the container with id 134cc080ebfb94d3e115bcb7d1da20f92db546031a11d0a19967da1e50011e6d Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.380651 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 09:49:38 crc kubenswrapper[4730]: E0930 09:49:38.380963 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.381171 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.381304 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 09:49:38 crc kubenswrapper[4730]: E0930 09:49:38.381317 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 09:49:38 crc kubenswrapper[4730]: E0930 09:49:38.381467 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.391579 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.515925 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-s64nf" event={"ID":"1a7e6b85-ac68-4da9-b7eb-b5a936f639df","Type":"ContainerStarted","Data":"a77da47c8d44b3aae7f4e27fcc4df190549c2d881e0969dbe3b8951103477ba3"} Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.515988 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-s64nf" event={"ID":"1a7e6b85-ac68-4da9-b7eb-b5a936f639df","Type":"ContainerStarted","Data":"507d6cecce487bebc8013ea0f9674bf242d7931459f6310c1f4ef4f23faa55d5"} Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.520634 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"96ab57c0ead54e1fe3bbf9a7fe3fc8375d1e5ebd29840f01bb2022de2f3bfdc8"} Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.520691 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"373aa4d8084c958728b4a3f16056068fb328a59e55a5f50785647d78fac0e424"} Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.522756 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" event={"ID":"823c4c28-801d-421e-b15f-02a17e300753","Type":"ContainerStarted","Data":"b495b3733f13e0b9f0b0ad95249d30857e31a0f6e908b5ff564a45f562b85018"} Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.525840 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"1917c40c0ff4b4b6a2389c465a4887bad0db0b5b86a8715e0c790b3e0acfb973"} Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.528322 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-p4xvk" 
event={"ID":"9871bed2-69f9-44f1-ab80-f8b4b9241e73","Type":"ContainerStarted","Data":"134cc080ebfb94d3e115bcb7d1da20f92db546031a11d0a19967da1e50011e6d"} Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.529292 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"95bd4436-8399-478d-9552-c9ba5ae8f327\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9d975\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9d975\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d4zf9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 09:49:38 crc 
kubenswrapper[4730]: I0930 09:49:38.531405 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-t2frc" event={"ID":"98a6f8df-1ac8-4652-8074-90cb180311ad","Type":"ContainerStarted","Data":"8219ce3adfefb56e2f36e3d108aa7500d5b9b56e9e3acdbb9f81a4464fd4b773"} Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.533823 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" event={"ID":"95bd4436-8399-478d-9552-c9ba5ae8f327","Type":"ContainerStarted","Data":"5827ecaa2b79de9664ceea2a12db7169e1a135e2ecea810f76a29ac9ed79b400"} Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.541573 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"14260296-7de2-4c79-8afc-9472b7132a27\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac2d1a0a531df208cd612ed7da358209e05fb5a21f658ca2d1ab62720db094e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9d7ccc34d02e190d685a52bfa797834b585dfe919856b9af92419fa4e6e3c8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e42db724f492a056ad52bf50f43c9603664c4307a8f0c01d2d83d36f644c134c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5305f42ae8fc9005ec4726813a881f7b86ab167dd060238bbde4c9af5eb2c010\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://009056b5d735b1cd232be92cb1bfae18ea1e4e6f5b06185d424bba38ef57ade7\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T09:49:30Z\\\",\\\"message\\\":\\\"W0930 09:49:19.606005 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0930 09:49:19.606415 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759225759 cert, and key in /tmp/serving-cert-1864340106/serving-signer.crt, /tmp/serving-cert-1864340106/serving-signer.key\\\\nI0930 09:49:19.940795 1 observer_polling.go:159] Starting file observer\\\\nW0930 09:49:19.943973 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0930 09:49:19.944252 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 09:49:19.946386 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1864340106/tls.crt::/tmp/serving-cert-1864340106/tls.key\\\\\\\"\\\\nF0930 09:49:30.331930 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2da24b18038fc740ead0fe99d18f91c87014548941763cf0ef15284ed5eb228\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://674fc8de60d8bca93dc0d54d5e12664780d58b8b4c5eaf314e923c2f0644e11b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://674fc8de60d8bca93dc0d54d5e12664780d58b8b4c5eaf314e923c2f0644e11b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:16Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.556325 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"services have not yet been read at least once, cannot construct envvars\\\",\\\"reason\\\":\\\"CreateContainerConfigError\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.568699 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.578593 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s64nf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a7e6b85-ac68-4da9-b7eb-b5a936f639df\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a77da47c8d44b3aae7f4e27fcc4df190549c2d881e0969dbe3b8951103477ba3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6rknv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s64nf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection 
refused" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.593335 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"823c4c28-801d-421e-b15f-02a17e300753\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastSta
te\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/s
ecrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainer
Statuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-c5vmh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.606182 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.617285 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.629629 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-t2frc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"98a6f8df-1ac8-4652-8074-90cb180311ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-txcfr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-t2frc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.643900 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-p4xvk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9871bed2-69f9-44f1-ab80-f8b4b9241e73\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin 
routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"last
State\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-p4xvk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 
127.0.0.1:9743: connect: connection refused" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.654100 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.678138 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook 
approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"services have not yet been read at least once, cannot construct envvars\\\",\\\"reason\\\":\\\"CreateContainerConfigError\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"services have not yet been read at least once, cannot construct envvars\\\",\\\"reason\\\":\\\"CreateContainerConfigError\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.697025 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"14260296-7de2-4c79-8afc-9472b7132a27\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac2d1a0a531df208cd612ed7da358209e05fb5a21f658ca2d1ab62720db094e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9d7ccc34d02e190d685a52bfa797834b585dfe919856b9af92419fa4e6e3c8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e42db724f492a056ad52bf50f43c9603664c4307a8f0c01d2d83d36f644c134c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5305f42ae8fc9005ec4726813a881f7b86ab167dd060238bbde4c9af5eb2c010\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://009056b5d735b1cd232be92cb1bfae18ea1e4e6f5b06185d424bba38ef57ade7\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T09:49:30Z\\\",\\\"message\\\":\\\"W0930 09:49:19.606005 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0930 09:49:19.606415 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759225759 cert, and key in /tmp/serving-cert-1864340106/serving-signer.crt, /tmp/serving-cert-1864340106/serving-signer.key\\\\nI0930 09:49:19.940795 1 observer_polling.go:159] Starting file observer\\\\nW0930 09:49:19.943973 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0930 09:49:19.944252 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 09:49:19.946386 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1864340106/tls.crt::/tmp/serving-cert-1864340106/tls.key\\\\\\\"\\\\nF0930 09:49:30.331930 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2da24b18038fc740ead0fe99d18f91c87014548941763cf0ef15284ed5eb228\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://674fc8de60d8bca93dc0d54d5e12664780d58b8b4c5eaf314e923c2f0644e11b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://674fc8de60d8bca93dc0d54d5e12664780d58b8b4c5eaf314e923c2f0644e11b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPa
th\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:16Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.711944 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1917c40c0ff4b4b6a2389c465a4887bad0db0b5b86a8715e0c790b3e0acfb973\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.723772 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.735851 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"95bd4436-8399-478d-9552-c9ba5ae8f327\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9d975\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9d975\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d4zf9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.751655 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s64nf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a7e6b85-ac68-4da9-b7eb-b5a936f639df\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a77da47c8d44b3aae7f4e27fcc4df190549c2d881e0969dbe3b8951103477ba3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6rknv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s64nf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.763831 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.773063 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.782604 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-t2frc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"98a6f8df-1ac8-4652-8074-90cb180311ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-txcfr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-t2frc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.801857 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"823c4c28-801d-421e-b15f-02a17e300753\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with incomplete status: 
[kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d7732574532
65a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod 
\"openshift-ovn-kubernetes\"/\"ovnkube-node-c5vmh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.814443 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.824872 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://96ab57c0ead54e1fe3bbf9a7fe3fc8375d1e5ebd29840f01bb2022de2f3bfdc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://373aa4d8084c958728b4a3f16056068fb328a59e55a5f50785647d78fac0e424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 09:49:38 crc kubenswrapper[4730]: I0930 09:49:38.838023 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-p4xvk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9871bed2-69f9-44f1-ab80-f8b4b9241e73\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"
name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\
\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-p4xvk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 09:49:39 crc kubenswrapper[4730]: I0930 09:49:39.540857 4730 generic.go:334] "Generic (PLEG): container finished" podID="9871bed2-69f9-44f1-ab80-f8b4b9241e73" containerID="84aa64a838e41f5db6010538528a9d8d9221b5fe859e1afec9d9d3c71ef90eb7" exitCode=0 Sep 30 09:49:39 crc kubenswrapper[4730]: I0930 09:49:39.540932 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-p4xvk" event={"ID":"9871bed2-69f9-44f1-ab80-f8b4b9241e73","Type":"ContainerDied","Data":"84aa64a838e41f5db6010538528a9d8d9221b5fe859e1afec9d9d3c71ef90eb7"} Sep 30 09:49:39 crc kubenswrapper[4730]: I0930 09:49:39.542859 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-t2frc" event={"ID":"98a6f8df-1ac8-4652-8074-90cb180311ad","Type":"ContainerStarted","Data":"ca62256374b9da6cb7b82db90ee6d121b072440c000bc7dfb04e763224cf666d"} Sep 30 09:49:39 crc kubenswrapper[4730]: I0930 09:49:39.545489 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" event={"ID":"95bd4436-8399-478d-9552-c9ba5ae8f327","Type":"ContainerStarted","Data":"3b7761b460113a85528a11a7475d5245ce3009a0dd413a8deec822c6c0eac07b"} Sep 30 09:49:39 crc kubenswrapper[4730]: I0930 09:49:39.545553 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" event={"ID":"95bd4436-8399-478d-9552-c9ba5ae8f327","Type":"ContainerStarted","Data":"defac85c6bf34a3ea262b1ad293516b72b27c3f3e328f3f970694bb978bf90a6"} Sep 30 09:49:39 crc kubenswrapper[4730]: I0930 09:49:39.547280 4730 generic.go:334] "Generic (PLEG): container finished" podID="823c4c28-801d-421e-b15f-02a17e300753" containerID="e4fcd3f6fd645e486e741ae4ddea7a8669c849d729860c651131da25638a393f" exitCode=0 Sep 30 09:49:39 crc kubenswrapper[4730]: I0930 09:49:39.547318 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" event={"ID":"823c4c28-801d-421e-b15f-02a17e300753","Type":"ContainerDied","Data":"e4fcd3f6fd645e486e741ae4ddea7a8669c849d729860c651131da25638a393f"} Sep 30 09:49:39 crc kubenswrapper[4730]: I0930 09:49:39.560262 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-p4xvk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9871bed2-69f9-44f1-ab80-f8b4b9241e73\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84aa64a838e41f5db6010538528a9d8d9221b5fe859e1afec9d9d3c71ef90eb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://84aa64a838e41f5db6010538528a9d8d9221b5fe859e1afec9d9d3c71ef90eb7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c85
7df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay
.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-p4xvk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:39Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:39 crc kubenswrapper[4730]: I0930 09:49:39.576210 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:39Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:39 crc kubenswrapper[4730]: I0930 09:49:39.593345 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://96ab57c0ead54e1fe3bbf9a7fe3fc8375d1e5ebd29840f01bb2022de2f3bfdc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://373aa4d8084c958728b4a3f16056068fb328a59e55a5f50785647d78fac0e424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:39Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:39 crc kubenswrapper[4730]: I0930 09:49:39.609454 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"95bd4436-8399-478d-9552-c9ba5ae8f327\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9d975\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9d975\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d4zf9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:39Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:39 crc kubenswrapper[4730]: I0930 09:49:39.631881 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"14260296-7de2-4c79-8afc-9472b7132a27\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac2d1a0a531df208cd612ed7da358209e05fb5a21f658ca2d1ab62720db094e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9d7ccc34d02e190d685a52bfa797834b585dfe919856b9af92419fa4e6e3c8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e42db724f492a056ad52bf50f43c9603664c4307a8f0c01d2d83d36f644c134c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5305f42ae8fc9005ec4726813a881f7b86ab167dd060238bbde4c9af5eb2c010\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://009056b5d735b1cd232be92cb1bfae18ea1e4e6f5b06185d424bba38ef57ade7\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T09:49:30Z\\\",\\\"message\\\":\\\"W0930 09:49:19.606005 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0930 
09:49:19.606415 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759225759 cert, and key in /tmp/serving-cert-1864340106/serving-signer.crt, /tmp/serving-cert-1864340106/serving-signer.key\\\\nI0930 09:49:19.940795 1 observer_polling.go:159] Starting file observer\\\\nW0930 09:49:19.943973 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0930 09:49:19.944252 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 09:49:19.946386 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1864340106/tls.crt::/tmp/serving-cert-1864340106/tls.key\\\\\\\"\\\\nF0930 09:49:30.331930 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2da24b18038fc740ead0fe99d18f91c87014548941763cf0ef15284ed5eb228\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://674fc8de60d8bca93dc0d54d5e12664780d58b8b4c5eaf314e923c2f0644e11b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://674fc8de60d8bca93dc0d54d5e12664780d58b8b4c5eaf314e923c2f0644e11b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:16Z\\\"}}\" for pod 
\"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:39Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:39 crc kubenswrapper[4730]: I0930 09:49:39.653135 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1917c40c0ff4b4b6a2389c465a4887bad0db0b5b86a8715e0c790b3e0acfb973\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:39Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:39 crc kubenswrapper[4730]: I0930 09:49:39.669212 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:39Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:39 crc kubenswrapper[4730]: I0930 09:49:39.680815 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s64nf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a7e6b85-ac68-4da9-b7eb-b5a936f639df\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a77da47c8d44b3aae7f4e27fcc4df190549c2d881e0969dbe3b8951103477ba3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6rknv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s64nf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:39Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:39 crc kubenswrapper[4730]: I0930 09:49:39.707829 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"823c4c28-801d-421e-b15f-02a17e300753\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers 
with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\
"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name
\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-c5vmh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not 
yet valid: current time 2025-09-30T09:49:39Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:39 crc kubenswrapper[4730]: I0930 09:49:39.726267 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:39Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:39 crc kubenswrapper[4730]: I0930 09:49:39.742441 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:39Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:39 crc kubenswrapper[4730]: I0930 09:49:39.758540 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-t2frc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"98a6f8df-1ac8-4652-8074-90cb180311ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-txcfr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-t2frc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:39Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:39 crc kubenswrapper[4730]: I0930 09:49:39.772045 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s64nf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a7e6b85-ac68-4da9-b7eb-b5a936f639df\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a77da47c8d44b3aae7f4e27fcc4df190549c2d881e0969dbe3b8951103477ba3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6rknv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s64nf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:39Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:39 crc kubenswrapper[4730]: I0930 09:49:39.788924 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:39Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:39 crc kubenswrapper[4730]: I0930 09:49:39.807067 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:39Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:39 crc kubenswrapper[4730]: I0930 09:49:39.825660 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-t2frc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"98a6f8df-1ac8-4652-8074-90cb180311ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca62256374b9da6cb7b82db90ee6d121b072440c000bc7dfb04e763224cf666d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\
"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-txcfr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-t2frc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:39Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:39 crc kubenswrapper[4730]: I0930 09:49:39.852714 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"823c4c28-801d-421e-b15f-02a17e300753\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4fcd3f6fd645e486e741ae4ddea7a8669c849d729860c651131da25638a393f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4fcd3f6fd645e486e741ae4ddea7a8669c849d729860c651131da25638a393f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-c5vmh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:39Z 
is after 2025-08-24T17:21:41Z" Sep 30 09:49:39 crc kubenswrapper[4730]: I0930 09:49:39.868636 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:39Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:39 crc kubenswrapper[4730]: I0930 09:49:39.883688 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://96ab57c0ead54e1fe3bbf9a7fe3fc8375d1e5ebd29840f01bb2022de2f3bfdc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://373aa4d8084c958728b4a3f16056068fb328a59e55a5f50785647d78fac0e424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:39Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:39 crc kubenswrapper[4730]: I0930 09:49:39.900083 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-p4xvk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9871bed2-69f9-44f1-ab80-f8b4b9241e73\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84aa64a838e41f5db6010538528a9d8d9221b5fe859e1afec9d9d3c71ef90eb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://84aa64a838e41f5db6010538528a9d8d9221b5fe859e1afec9d9d3c71ef90eb7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c85
7df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay
.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-p4xvk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:39Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:39 crc kubenswrapper[4730]: I0930 09:49:39.914892 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 09:49:39 crc kubenswrapper[4730]: I0930 09:49:39.915018 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 09:49:39 crc kubenswrapper[4730]: I0930 09:49:39.915076 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 09:49:39 crc kubenswrapper[4730]: E0930 09:49:39.915121 4730 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 30 09:49:39 crc kubenswrapper[4730]: E0930 09:49:39.915131 4730 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Sep 30 09:49:39 crc kubenswrapper[4730]: E0930 09:49:39.915152 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 09:49:43.915097983 +0000 UTC m=+28.248358096 (durationBeforeRetry 4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 09:49:39 crc kubenswrapper[4730]: E0930 09:49:39.915211 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-30 09:49:43.915198056 +0000 UTC m=+28.248458239 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered
Sep 30 09:49:39 crc kubenswrapper[4730]: E0930 09:49:39.915246 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-30 09:49:43.915229077 +0000 UTC m=+28.248489260 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered
Sep 30 09:49:39 crc kubenswrapper[4730]: I0930 09:49:39.925740 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"14260296-7de2-4c79-8afc-9472b7132a27\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"message\\\":\\\"containers with unready status:
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac2d1a0a531df208cd612ed7da358209e05fb5a21f658ca2d1ab62720db094e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9d7ccc34d02e190d685a52bfa797834b585dfe919856b9af92419fa4e6e3c8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e42db724f492a056ad52bf50f43c9603664c4307a8f0c01d2d83d36f644c134c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5305f42ae8fc9005ec4726813a881f7b86ab167dd060238bbde4c9af5eb2c010\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://009056b5d735b1cd232be92cb1bfae18ea1e4e6f5b06185d424bba38ef57ade7\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T09:49:30Z\\\",\\\"message\\\":\\\"W0930 09:49:19.606005 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0930 
09:49:19.606415 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759225759 cert, and key in /tmp/serving-cert-1864340106/serving-signer.crt, /tmp/serving-cert-1864340106/serving-signer.key\\\\nI0930 09:49:19.940795 1 observer_polling.go:159] Starting file observer\\\\nW0930 09:49:19.943973 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0930 09:49:19.944252 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 09:49:19.946386 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1864340106/tls.crt::/tmp/serving-cert-1864340106/tls.key\\\\\\\"\\\\nF0930 09:49:30.331930 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2da24b18038fc740ead0fe99d18f91c87014548941763cf0ef15284ed5eb228\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://674fc8de60d8bca93dc0d54d5e12664780d58b8b4c5eaf314e923c2f0644e11b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://674fc8de60d8bca93dc0d54d5e12664780d58b8b4c5eaf314e923c2f0644e11b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:16Z\\\"}}\" for pod 
\"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:39Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:39 crc kubenswrapper[4730]: I0930 09:49:39.942393 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1917c40c0ff4b4b6a2389c465a4887bad0db0b5b86a8715e0c790b3e0acfb973\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:39Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:39 crc kubenswrapper[4730]: I0930 09:49:39.956745 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:39Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:39 crc kubenswrapper[4730]: I0930 09:49:39.974416 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"95bd4436-8399-478d-9552-c9ba5ae8f327\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b7761b460113a85528a11a7475d5245ce3009a0dd413a8deec822c6c0eac07b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9d975\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://defac85c6bf34a3ea262b1ad293516b72b27c3f3e328f3f970694bb978bf90a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9d975\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d4zf9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:39Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:40 crc kubenswrapper[4730]: I0930 09:49:40.015845 4730 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 09:49:40 crc kubenswrapper[4730]: I0930 09:49:40.015899 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 09:49:40 crc kubenswrapper[4730]: E0930 09:49:40.016060 4730 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 30 09:49:40 crc kubenswrapper[4730]: E0930 09:49:40.016090 4730 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 30 09:49:40 crc kubenswrapper[4730]: E0930 09:49:40.016107 4730 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 09:49:40 crc kubenswrapper[4730]: E0930 09:49:40.016178 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-09-30 09:49:44.016159699 +0000 UTC m=+28.349419702 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 09:49:40 crc kubenswrapper[4730]: E0930 09:49:40.016527 4730 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 30 09:49:40 crc kubenswrapper[4730]: E0930 09:49:40.016684 4730 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 30 09:49:40 crc kubenswrapper[4730]: E0930 09:49:40.016777 4730 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 09:49:40 crc kubenswrapper[4730]: E0930 09:49:40.016938 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. 
Sep 30 09:49:40 crc kubenswrapper[4730]: E0930 09:49:40.016938 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-09-30 09:49:44.01690975 +0000 UTC m=+28.350169923 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Sep 30 09:49:40 crc kubenswrapper[4730]: I0930 09:49:40.380862 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 30 09:49:40 crc kubenswrapper[4730]: I0930 09:49:40.381041 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 30 09:49:40 crc kubenswrapper[4730]: E0930 09:49:40.381116 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 30 09:49:40 crc kubenswrapper[4730]: E0930 09:49:40.381035 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
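The "Error syncing pod" entries above are the kubelet's network-readiness gate: no pod sandbox is created while the container runtime reports NetworkReady=false, and the message names the concrete cause, an empty CNI config directory. Once ovn-kubernetes (whose containers start in the events below) writes its config there, the gate should clear. A small standalone sketch of the same surface check follows; the directory comes from the log line, and the accepted extensions are an assumption based on common CNI config-loader behavior:

```go
// cni_ready.go - hedged sketch: look for a CNI network config in the
// directory named by the "no CNI configuration file" error above.
package main

import (
	"fmt"
	"os"
	"path/filepath"
	"strings"
)

func main() {
	dir := "/etc/kubernetes/cni/net.d" // directory from the log message
	entries, err := os.ReadDir(dir)
	if err != nil {
		fmt.Println("cannot read CNI conf dir:", err)
		return
	}
	var confs []string
	for _, e := range entries {
		// extensions commonly accepted by CNI config loaders (assumption)
		switch strings.ToLower(filepath.Ext(e.Name())) {
		case ".conf", ".conflist", ".json":
			confs = append(confs, e.Name())
		}
	}
	if len(confs) == 0 {
		fmt.Println("no CNI configuration file found; NetworkReady stays false")
		return
	}
	fmt.Println("CNI configs present:", confs)
}
```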
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 09:49:40 crc kubenswrapper[4730]: I0930 09:49:40.554580 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"e32477c6ee071d9e994825da575711cc75f177c998dd5fd170be07551b40e049"} Sep 30 09:49:40 crc kubenswrapper[4730]: I0930 09:49:40.563168 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" event={"ID":"823c4c28-801d-421e-b15f-02a17e300753","Type":"ContainerStarted","Data":"7d99f5927ded358669fa88d47fb6a8d8bad2808bfdd07920056227828e478eda"} Sep 30 09:49:40 crc kubenswrapper[4730]: I0930 09:49:40.563255 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" event={"ID":"823c4c28-801d-421e-b15f-02a17e300753","Type":"ContainerStarted","Data":"6fbe6a18bf72a902960dfc551de099c3956696ca5ac78ff5afc30a93768be323"} Sep 30 09:49:40 crc kubenswrapper[4730]: I0930 09:49:40.563272 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" event={"ID":"823c4c28-801d-421e-b15f-02a17e300753","Type":"ContainerStarted","Data":"6f988662b16857e74e7c88eb9e69279e7b5807aa2831cdd6ed9a7cab0a6b2494"} Sep 30 09:49:40 crc kubenswrapper[4730]: I0930 09:49:40.563284 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" event={"ID":"823c4c28-801d-421e-b15f-02a17e300753","Type":"ContainerStarted","Data":"566f950ac85f82874c332e01386898784e3b3e37ee3c422f8b73f6e732cd4541"} Sep 30 09:49:40 crc kubenswrapper[4730]: I0930 09:49:40.563294 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" event={"ID":"823c4c28-801d-421e-b15f-02a17e300753","Type":"ContainerStarted","Data":"bd7abb6daba8cf38e3d8cc62ab526acb2fe714e07477eb5d1ecf77e145cf60dc"} Sep 30 09:49:40 crc kubenswrapper[4730]: I0930 09:49:40.563307 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" event={"ID":"823c4c28-801d-421e-b15f-02a17e300753","Type":"ContainerStarted","Data":"0d525f8baccabf7bc3b8067dbbc59d76851878ccc48a9fbe61c637b6fe57e01d"} Sep 30 09:49:40 crc kubenswrapper[4730]: I0930 09:49:40.566929 4730 generic.go:334] "Generic (PLEG): container finished" podID="9871bed2-69f9-44f1-ab80-f8b4b9241e73" containerID="618950d5ff8ec1ef34dc0f43e73a265aefbeb97f69a25226fb2007b4d28249a7" exitCode=0 Sep 30 09:49:40 crc kubenswrapper[4730]: I0930 09:49:40.567923 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-p4xvk" event={"ID":"9871bed2-69f9-44f1-ab80-f8b4b9241e73","Type":"ContainerDied","Data":"618950d5ff8ec1ef34dc0f43e73a265aefbeb97f69a25226fb2007b4d28249a7"} Sep 30 09:49:40 crc kubenswrapper[4730]: I0930 09:49:40.570129 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:40Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:40 crc kubenswrapper[4730]: I0930 09:49:40.574081 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-nw55k"] Sep 30 09:49:40 crc kubenswrapper[4730]: I0930 09:49:40.574515 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-nw55k" Sep 30 09:49:40 crc kubenswrapper[4730]: I0930 09:49:40.582917 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Sep 30 09:49:40 crc kubenswrapper[4730]: I0930 09:49:40.585030 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Sep 30 09:49:40 crc kubenswrapper[4730]: I0930 09:49:40.586997 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Sep 30 09:49:40 crc kubenswrapper[4730]: I0930 09:49:40.591423 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Sep 30 09:49:40 crc kubenswrapper[4730]: I0930 09:49:40.613853 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://96ab57c0ead54e1fe3bbf9a7fe3fc8375d1e5ebd29840f01bb2022de2f3bfdc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://373aa4d8084c958728b4a3f16056068fb328a59e55a5f50785647d78fac0e424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/sec
rets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:40Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:40 crc kubenswrapper[4730]: I0930 09:49:40.651342 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-p4xvk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9871bed2-69f9-44f1-ab80-f8b4b9241e73\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84aa64a838e41f5db6010538528a9d8d9221b5fe859e1afec9d9d3c71ef90eb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://84aa64a838e41f5db6010538528a9d8d9221b5fe859e1afec9d9d3c71ef90eb7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reaso
n\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-p4xvk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:40Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:40 crc 
kubenswrapper[4730]: I0930 09:49:40.691291 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"14260296-7de2-4c79-8afc-9472b7132a27\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac2d1a0a531df208cd612ed7da358209e05fb5a21f658ca2d1ab62720db094e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9d7ccc34d02e190d685a52bfa797834b585dfe919856b9af92419fa4e6e3c8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e42db724f492a056ad52bf50f43c9603664c4307a8f0c01d2d83d36f644c134c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"runnin
g\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5305f42ae8fc9005ec4726813a881f7b86ab167dd060238bbde4c9af5eb2c010\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://009056b5d735b1cd232be92cb1bfae18ea1e4e6f5b06185d424bba38ef57ade7\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T09:49:30Z\\\",\\\"message\\\":\\\"W0930 09:49:19.606005 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0930 09:49:19.606415 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759225759 cert, and key in /tmp/serving-cert-1864340106/serving-signer.crt, /tmp/serving-cert-1864340106/serving-signer.key\\\\nI0930 09:49:19.940795 1 observer_polling.go:159] Starting file observer\\\\nW0930 09:49:19.943973 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0930 09:49:19.944252 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 09:49:19.946386 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1864340106/tls.crt::/tmp/serving-cert-1864340106/tls.key\\\\\\\"\\\\nF0930 09:49:30.331930 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2da24b18038fc740ead0fe99d18f91c87014548941763cf0ef15284ed5eb228\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://674fc8de60d8bca93dc0d54d5e12664780d58b8b4c5eaf314e923c2f0644e11b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://674fc8de60d8bca93dc0d54d5e12664780d58b8b4c5eaf314e923c2f0644e11b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:16Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:40Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:40 crc kubenswrapper[4730]: I0930 09:49:40.720722 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1917c40c0ff4b4b6a2389c465a4887bad0db0b5b86a8715e0c790b3e0acfb973\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:40Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:40 crc kubenswrapper[4730]: I0930 09:49:40.731057 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7qdpm\" (UniqueName: \"kubernetes.io/projected/c0a99445-b60e-4c47-b4ae-00b9983d8a15-kube-api-access-7qdpm\") pod \"node-ca-nw55k\" (UID: \"c0a99445-b60e-4c47-b4ae-00b9983d8a15\") " pod="openshift-image-registry/node-ca-nw55k" Sep 30 09:49:40 crc kubenswrapper[4730]: I0930 09:49:40.731160 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/c0a99445-b60e-4c47-b4ae-00b9983d8a15-host\") pod \"node-ca-nw55k\" (UID: \"c0a99445-b60e-4c47-b4ae-00b9983d8a15\") " pod="openshift-image-registry/node-ca-nw55k" Sep 30 09:49:40 crc kubenswrapper[4730]: I0930 09:49:40.731198 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/c0a99445-b60e-4c47-b4ae-00b9983d8a15-serviceca\") pod \"node-ca-nw55k\" (UID: \"c0a99445-b60e-4c47-b4ae-00b9983d8a15\") " pod="openshift-image-registry/node-ca-nw55k" Sep 30 09:49:40 crc kubenswrapper[4730]: I0930 09:49:40.742048 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e32477c6ee071d9e994825da575711cc75f177c998dd5fd170be07551b40e049\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:40Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:40 crc kubenswrapper[4730]: I0930 09:49:40.762106 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"95bd4436-8399-478d-9552-c9ba5ae8f327\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b7761b460113a85528a11a7475d5245ce3009a0dd413a8deec822c6c0eac07b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9d975\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://defac85c6bf34a3ea262b1ad293516b72b27c3f3e328f3f970694bb978bf90a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9d975\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d4zf9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:40Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:40 crc kubenswrapper[4730]: I0930 09:49:40.779428 4730 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-dns/node-resolver-s64nf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a7e6b85-ac68-4da9-b7eb-b5a936f639df\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a77da47c8d44b3aae7f4e27fcc4df190549c2d881e0969dbe3b8951103477ba3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6rknv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s64nf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:40Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:40 crc kubenswrapper[4730]: I0930 09:49:40.800931 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:40Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:40 crc kubenswrapper[4730]: I0930 09:49:40.814317 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:40Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:40 crc kubenswrapper[4730]: I0930 09:49:40.829436 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-t2frc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"98a6f8df-1ac8-4652-8074-90cb180311ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca62256374b9da6cb7b82db90ee6d121b072440c000bc7dfb04e763224cf666d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\
"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-txcfr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-t2frc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:40Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:40 crc kubenswrapper[4730]: I0930 09:49:40.832017 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7qdpm\" (UniqueName: \"kubernetes.io/projected/c0a99445-b60e-4c47-b4ae-00b9983d8a15-kube-api-access-7qdpm\") pod \"node-ca-nw55k\" (UID: \"c0a99445-b60e-4c47-b4ae-00b9983d8a15\") " pod="openshift-image-registry/node-ca-nw55k" Sep 30 09:49:40 crc kubenswrapper[4730]: I0930 09:49:40.832072 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/c0a99445-b60e-4c47-b4ae-00b9983d8a15-host\") pod \"node-ca-nw55k\" (UID: \"c0a99445-b60e-4c47-b4ae-00b9983d8a15\") " pod="openshift-image-registry/node-ca-nw55k" Sep 30 09:49:40 crc kubenswrapper[4730]: I0930 09:49:40.832093 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/c0a99445-b60e-4c47-b4ae-00b9983d8a15-serviceca\") pod \"node-ca-nw55k\" (UID: \"c0a99445-b60e-4c47-b4ae-00b9983d8a15\") " pod="openshift-image-registry/node-ca-nw55k" Sep 30 09:49:40 crc kubenswrapper[4730]: I0930 09:49:40.832155 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/c0a99445-b60e-4c47-b4ae-00b9983d8a15-host\") pod \"node-ca-nw55k\" (UID: \"c0a99445-b60e-4c47-b4ae-00b9983d8a15\") " pod="openshift-image-registry/node-ca-nw55k" Sep 30 09:49:40 crc kubenswrapper[4730]: I0930 09:49:40.833189 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/c0a99445-b60e-4c47-b4ae-00b9983d8a15-serviceca\") pod \"node-ca-nw55k\" (UID: \"c0a99445-b60e-4c47-b4ae-00b9983d8a15\") " pod="openshift-image-registry/node-ca-nw55k" Sep 30 09:49:40 crc kubenswrapper[4730]: I0930 09:49:40.853036 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"823c4c28-801d-421e-b15f-02a17e300753\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4fcd3f6fd645e486e741ae4ddea7a8669c849d729860c651131da25638a393f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4fcd3f6fd645e486e741ae4ddea7a8669c849d729860c651131da25638a393f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-c5vmh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:40Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:40 crc kubenswrapper[4730]: I0930 09:49:40.860366 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7qdpm\" (UniqueName: \"kubernetes.io/projected/c0a99445-b60e-4c47-b4ae-00b9983d8a15-kube-api-access-7qdpm\") pod \"node-ca-nw55k\" (UID: \"c0a99445-b60e-4c47-b4ae-00b9983d8a15\") " pod="openshift-image-registry/node-ca-nw55k" Sep 30 09:49:40 crc kubenswrapper[4730]: I0930 09:49:40.878455 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1917c40c0ff4b4b6a2389c465a4887bad0db0b5b86a8715e0c790b3e0acfb973\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:40Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:40 crc kubenswrapper[4730]: I0930 09:49:40.902472 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e32477c6ee071d9e994825da575711cc75f177c998dd5fd170be07551b40e049\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:40Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:40 crc kubenswrapper[4730]: I0930 09:49:40.918683 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"95bd4436-8399-478d-9552-c9ba5ae8f327\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b7761b460113a85528a11a7475d5245ce3009a0dd413a8deec822c6c0eac07b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9d975\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://defac85c6bf34a3ea262b1ad293516b72b27c3f3e328f3f970694bb978bf90a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9d975\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d4zf9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:40Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:40 crc kubenswrapper[4730]: I0930 09:49:40.936428 4730 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-nw55k" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c0a99445-b60e-4c47-b4ae-00b9983d8a15\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qdpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:40Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nw55k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:40Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:40 crc kubenswrapper[4730]: I0930 09:49:40.954008 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"14260296-7de2-4c79-8afc-9472b7132a27\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac2d1a0a531df208cd612ed7da358209e05fb5a21f658ca2d1ab62720db094e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9d7ccc34d02e190d685a52bfa797834b585dfe919856b9af92419fa4e6e3c8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e42db724f492a056ad52bf50f43c9603664c4307a8f0c01d2d83d36f644c134c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5305f42ae8fc9005ec4726813a881f7b86ab167dd060238bbde4c9af5eb2c010\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://009056b5d735b1cd232be92cb1bfae18ea1e4e6f5b06185d424bba38ef57ade7\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T09:49:30Z\\\",\\\"message\\\":\\\"W0930 09:49:19.606005 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0930 09:49:19.606415 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759225759 cert, and key in /tmp/serving-cert-1864340106/serving-signer.crt, /tmp/serving-cert-1864340106/serving-signer.key\\\\nI0930 09:49:19.940795 1 observer_polling.go:159] Starting file observer\\\\nW0930 09:49:19.943973 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0930 09:49:19.944252 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 09:49:19.946386 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1864340106/tls.crt::/tmp/serving-cert-1864340106/tls.key\\\\\\\"\\\\nF0930 09:49:30.331930 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2da24b18038fc740ead0fe99d18f91c87014548941763cf0ef15284ed5eb228\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://674fc8de60d8bca93dc0d54d5e12664780d58b8b4c5eaf314e923c2f0644e11b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://674fc8de60d8bca93dc0d54d5e12664780d58b8b4c5eaf314e923c2f0644e11b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPa
th\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:16Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:40Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:40 crc kubenswrapper[4730]: I0930 09:49:40.971156 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s64nf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a7e6b85-ac68-4da9-b7eb-b5a936f639df\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a77da47c8d44b3aae7f4e27fcc4df190549c2d881e0969dbe3b8951103477ba3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6rknv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s64nf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:40Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:41 crc kubenswrapper[4730]: I0930 09:49:41.010340 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:41Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:41 crc kubenswrapper[4730]: I0930 09:49:41.024513 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:41Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:41 crc kubenswrapper[4730]: I0930 09:49:41.042820 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-t2frc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"98a6f8df-1ac8-4652-8074-90cb180311ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca62256374b9da6cb7b82db90ee6d121b072440c000bc7dfb04e763224cf666d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"syste
m-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-txcfr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-t2frc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:41Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:41 crc kubenswrapper[4730]: I0930 09:49:41.061191 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-nw55k" Sep 30 09:49:41 crc kubenswrapper[4730]: I0930 09:49:41.067521 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"823c4c28-801d-421e-b15f-02a17e300753\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"rest
artCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-ap
i-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4fcd3f6fd645e486
e741ae4ddea7a8669c849d729860c651131da25638a393f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4fcd3f6fd645e486e741ae4ddea7a8669c849d729860c651131da25638a393f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-c5vmh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:41Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:41 crc kubenswrapper[4730]: W0930 09:49:41.082402 4730 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc0a99445_b60e_4c47_b4ae_00b9983d8a15.slice/crio-f99b18aa84f3480bfb7e68b32fc8a18455bad67ffe463979818ecfbd27e663a5 WatchSource:0}: Error finding container f99b18aa84f3480bfb7e68b32fc8a18455bad67ffe463979818ecfbd27e663a5: Status 404 returned error can't find the container with id f99b18aa84f3480bfb7e68b32fc8a18455bad67ffe463979818ecfbd27e663a5 Sep 30 09:49:41 crc kubenswrapper[4730]: I0930 09:49:41.084020 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:41Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:41 crc kubenswrapper[4730]: I0930 09:49:41.100525 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://96ab57c0ead54e1fe3bbf9a7fe3fc8375d1e5ebd29840f01bb2022de2f3bfdc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://373aa4d8084c958728b4a3f16056068fb328a59e55a5f50785647d78fac0e424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:41Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:41 crc kubenswrapper[4730]: I0930 09:49:41.116971 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-p4xvk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9871bed2-69f9-44f1-ab80-f8b4b9241e73\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84aa64a838e41f5db6010538528a9d8d9221b5fe859e1afec9d9d3c71ef90eb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://84aa64a838e41f5db6010538528a9d8d9221b5fe859e1afec9d9d3c71ef90eb7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://618950d5ff8ec1ef34dc0f43e73a265aefbeb97f69a25226fb2007b4d28249a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://618950d5ff8ec1ef34dc0f43e73a265aefbeb97f69a25226fb2007b4d28249a7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-
30T09:49:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-p4xvk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:41Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:41 crc kubenswrapper[4730]: I0930 09:49:41.239835 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 30 09:49:41 crc kubenswrapper[4730]: I0930 09:49:41.262661 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-p4xvk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9871bed2-69f9-44f1-ab80-f8b4b9241e73\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84aa64a838e41f5db6010538528a9d8d9221b5fe859e1afec9d9d3c71ef90eb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://84aa64a838e41f5db6010538528a9d8d9221b5fe859e1afec9d9d3c71ef90eb7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://618950d5ff8ec1ef34dc0f43e73a265aefbeb97f69a25226fb2007b4d28249a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://618950d5ff8ec1ef34dc0f43e73a265aefbeb97f69a25226fb2007b4d28249a7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-
30T09:49:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-p4xvk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:41Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:41 crc kubenswrapper[4730]: I0930 09:49:41.276808 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:41Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:41 crc kubenswrapper[4730]: I0930 09:49:41.290143 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://96ab57c0ead54e1fe3bbf9a7fe3fc8375d1e5ebd29840f01bb2022de2f3bfdc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://373aa4d8084c958728b4a3f16056068fb328a59e55a5f50785647d78fac0e424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:41Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:41 crc kubenswrapper[4730]: I0930 09:49:41.302259 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"95bd4436-8399-478d-9552-c9ba5ae8f327\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b7761b460113a85528a11a7475d5245ce3009a0dd413a8deec822c6c0eac07b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9d975\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://defac85c6bf34a3ea262b1ad293516b72b27c3f3e328f3f970694bb978bf90a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernete
s.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9d975\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d4zf9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:41Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:41 crc kubenswrapper[4730]: I0930 09:49:41.313297 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-nw55k" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c0a99445-b60e-4c47-b4ae-00b9983d8a15\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qdpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:40Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nw55k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:41Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:41 crc kubenswrapper[4730]: I0930 
09:49:41.329562 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"14260296-7de2-4c79-8afc-9472b7132a27\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac2d1a0a531df208cd612ed7da358209e05fb5a21f658ca2d1ab62720db094e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9d7ccc34d02e190d685a52bfa797834b585dfe919856b9af92419fa4e6e3c8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e42db724f492a056ad52bf50f43c9603664c4307a8f0c01d2d83d36f644c134c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":
\\\"cri-o://5305f42ae8fc9005ec4726813a881f7b86ab167dd060238bbde4c9af5eb2c010\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://009056b5d735b1cd232be92cb1bfae18ea1e4e6f5b06185d424bba38ef57ade7\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T09:49:30Z\\\",\\\"message\\\":\\\"W0930 09:49:19.606005 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0930 09:49:19.606415 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759225759 cert, and key in /tmp/serving-cert-1864340106/serving-signer.crt, /tmp/serving-cert-1864340106/serving-signer.key\\\\nI0930 09:49:19.940795 1 observer_polling.go:159] Starting file observer\\\\nW0930 09:49:19.943973 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0930 09:49:19.944252 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 09:49:19.946386 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1864340106/tls.crt::/tmp/serving-cert-1864340106/tls.key\\\\\\\"\\\\nF0930 09:49:30.331930 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2da24b18038fc740ead0fe99d18f91c87014548941763cf0ef15284ed5eb228\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://674fc8de60d8bca93dc0d54d5e12664780d58b8b4c5eaf314e923c2f0644e11b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://674fc8de60d8bca93dc0d54d5e12664780d58b8b4c5eaf314e923c2f0644e11b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:16Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:41Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:41 crc kubenswrapper[4730]: I0930 09:49:41.344918 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1917c40c0ff4b4b6a2389c465a4887bad0db0b5b86a8715e0c790b3e0acfb973\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:41Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:41 crc kubenswrapper[4730]: I0930 09:49:41.358437 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e32477c6ee071d9e994825da575711cc75f177c998dd5fd170be07551b40e049\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:41Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:41 crc kubenswrapper[4730]: I0930 09:49:41.370312 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s64nf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a7e6b85-ac68-4da9-b7eb-b5a936f639df\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a77da47c8d44b3aae7f4e27fcc4df190549c2d881e0969dbe3b8951103477ba3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6rknv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s64nf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:41Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:41 crc kubenswrapper[4730]: I0930 09:49:41.388231 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"823c4c28-801d-421e-b15f-02a17e300753\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":
\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4fcd3f6fd645e486e741ae4ddea7a8669c849d729860c651131da25638a393f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4fcd3f6fd645e486e741ae4ddea7a8669c849d729860c651131da25638a393f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37
Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-c5vmh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:41Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:41 crc kubenswrapper[4730]: I0930 09:49:41.402433 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:41Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:41 crc kubenswrapper[4730]: I0930 09:49:41.416375 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:41Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:41 crc kubenswrapper[4730]: I0930 09:49:41.429906 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-t2frc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"98a6f8df-1ac8-4652-8074-90cb180311ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca62256374b9da6cb7b82db90ee6d121b072440c000bc7dfb04e763224cf666d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\
"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-txcfr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-t2frc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:41Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:41 crc kubenswrapper[4730]: I0930 09:49:41.577511 4730 generic.go:334] "Generic (PLEG): container finished" podID="9871bed2-69f9-44f1-ab80-f8b4b9241e73" containerID="849eb24b3a7e62a7667c303069953f1391cdebf8f2888dc7d3967667c31c2a3e" exitCode=0 Sep 30 09:49:41 crc kubenswrapper[4730]: I0930 09:49:41.577631 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-p4xvk" event={"ID":"9871bed2-69f9-44f1-ab80-f8b4b9241e73","Type":"ContainerDied","Data":"849eb24b3a7e62a7667c303069953f1391cdebf8f2888dc7d3967667c31c2a3e"} Sep 30 09:49:41 crc kubenswrapper[4730]: I0930 09:49:41.580361 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-nw55k" event={"ID":"c0a99445-b60e-4c47-b4ae-00b9983d8a15","Type":"ContainerStarted","Data":"b566e571569572866ddcfc76684ec3f1fdabc0f890aa5fd8588d136b890e88ca"} Sep 30 09:49:41 crc kubenswrapper[4730]: I0930 09:49:41.580450 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-nw55k" event={"ID":"c0a99445-b60e-4c47-b4ae-00b9983d8a15","Type":"ContainerStarted","Data":"f99b18aa84f3480bfb7e68b32fc8a18455bad67ffe463979818ecfbd27e663a5"} Sep 30 09:49:41 crc kubenswrapper[4730]: I0930 09:49:41.593542 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:41Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:41 crc kubenswrapper[4730]: I0930 09:49:41.608407 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://96ab57c0ead54e1fe3bbf9a7fe3fc8375d1e5ebd29840f01bb2022de2f3bfdc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://373aa4d8084c958728b4a3f16056068fb328a59e55a5f50785647d78fac0e424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:41Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:41 crc kubenswrapper[4730]: I0930 09:49:41.629007 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-p4xvk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9871bed2-69f9-44f1-ab80-f8b4b9241e73\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84aa64a838e41f5db6010538528a9d8d9221b5fe859e1afec9d9d3c71ef90eb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://84aa64a838e41f5db6010538528a9d8d9221b5fe859e1afec9d9d3c71ef90eb7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://618950d5ff8ec1ef34dc0f43e73a265aefbeb97f69a25226fb2007b4d28249a7\\\",\\\"image\\\":\\\"quay.io/open
shift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://618950d5ff8ec1ef34dc0f43e73a265aefbeb97f69a25226fb2007b4d28249a7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://849eb24b3a7e62a7667c303069953f1391cdebf8f2888dc7d3967667c31c2a3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://849eb24b3a7e62a7667c303069953f1391cdebf8f2888dc7d3967667c31c2a3e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev
@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-p4xvk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:41Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:41 crc kubenswrapper[4730]: I0930 09:49:41.644346 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1917c40c0ff4b4b6a2389c465a4887bad0db0b5b86a8715e0c790b3e0acfb973\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:41Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:41 crc kubenswrapper[4730]: I0930 09:49:41.661285 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e32477c6ee071d9e994825da575711cc75f177c998dd5fd170be07551b40e049\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:41Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:41 crc kubenswrapper[4730]: I0930 09:49:41.674437 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"95bd4436-8399-478d-9552-c9ba5ae8f327\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b7761b460113a85528a11a7475d5245ce3009a0dd413a8deec822c6c0eac07b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9d975\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://defac85c6bf34a3ea262b1ad293516b72b27c3f3e328f3f970694bb978bf90a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9d975\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d4zf9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:41Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:41 crc kubenswrapper[4730]: I0930 09:49:41.687093 4730 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-nw55k" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c0a99445-b60e-4c47-b4ae-00b9983d8a15\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qdpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:40Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nw55k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:41Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:41 crc kubenswrapper[4730]: I0930 09:49:41.702556 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"14260296-7de2-4c79-8afc-9472b7132a27\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac2d1a0a531df208cd612ed7da358209e05fb5a21f658ca2d1ab62720db094e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9d7ccc34d02e190d685a52bfa797834b585dfe919856b9af92419fa4e6e3c8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e42db724f492a056ad52bf50f43c9603664c4307a8f0c01d2d83d36f644c134c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5305f42ae8fc9005ec4726813a881f7b86ab167dd060238bbde4c9af5eb2c010\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://009056b5d735b1cd232be92cb1bfae18ea1e4e6f5b06185d424bba38ef57ade7\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T09:49:30Z\\\",\\\"message\\\":\\\"W0930 09:49:19.606005 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0930 09:49:19.606415 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759225759 cert, and key in /tmp/serving-cert-1864340106/serving-signer.crt, /tmp/serving-cert-1864340106/serving-signer.key\\\\nI0930 09:49:19.940795 1 observer_polling.go:159] Starting file observer\\\\nW0930 09:49:19.943973 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0930 09:49:19.944252 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 09:49:19.946386 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1864340106/tls.crt::/tmp/serving-cert-1864340106/tls.key\\\\\\\"\\\\nF0930 09:49:30.331930 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2da24b18038fc740ead0fe99d18f91c87014548941763cf0ef15284ed5eb228\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://674fc8de60d8bca93dc0d54d5e12664780d58b8b4c5eaf314e923c2f0644e11b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminat
ed\\\":{\\\"containerID\\\":\\\"cri-o://674fc8de60d8bca93dc0d54d5e12664780d58b8b4c5eaf314e923c2f0644e11b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:16Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:41Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:41 crc kubenswrapper[4730]: I0930 09:49:41.714923 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s64nf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a7e6b85-ac68-4da9-b7eb-b5a936f639df\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a77da47c8d44b3aae7f4e27fcc4df190549c2d881e0969dbe3b8951103477ba3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6rknv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s64nf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:41Z is after 
2025-08-24T17:21:41Z" Sep 30 09:49:41 crc kubenswrapper[4730]: I0930 09:49:41.728771 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:41Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:41 crc kubenswrapper[4730]: I0930 09:49:41.745062 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-t2frc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"98a6f8df-1ac8-4652-8074-90cb180311ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca62256374b9da6cb7b82db90ee6d121b072440c000bc7dfb04e763224cf666d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-txcfr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-t2frc\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:41Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:41 crc kubenswrapper[4730]: I0930 09:49:41.752236 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 30 09:49:41 crc kubenswrapper[4730]: I0930 09:49:41.758262 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 30 09:49:41 crc kubenswrapper[4730]: I0930 09:49:41.761865 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/kube-controller-manager-crc"] Sep 30 09:49:41 crc kubenswrapper[4730]: I0930 09:49:41.770402 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"823c4c28-801d-421e-b15f-02a17e300753\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4fcd3f6fd645e486e741ae4ddea7a8669c849d729860c651131da25638a393f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4fcd3f6fd645e486e741ae4ddea7a8669c849d729860c651131da25638a393f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-c5vmh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:41Z 
is after 2025-08-24T17:21:41Z" Sep 30 09:49:41 crc kubenswrapper[4730]: I0930 09:49:41.782768 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:41Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:41 crc kubenswrapper[4730]: I0930 09:49:41.797295 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-t2frc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"98a6f8df-1ac8-4652-8074-90cb180311ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca62256374b9da6cb7b82db90ee6d121b072440c000bc7dfb04e763224cf666d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-txcfr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-t2frc\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:41Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:41 crc kubenswrapper[4730]: I0930 09:49:41.817645 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"823c4c28-801d-421e-b15f-02a17e300753\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release
-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\"
,\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\
\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4fcd3f6fd645e486e741ae4ddea7a8669c849d729860c651131da25638a393f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4fcd3f6fd645e486e741ae4ddea7a8669c849d729860c651131da25638a393f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-c5vmh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:41Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:41 crc kubenswrapper[4730]: I0930 09:49:41.835873 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:41Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:41 crc kubenswrapper[4730]: I0930 09:49:41.848591 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:41Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:41 crc kubenswrapper[4730]: I0930 09:49:41.862912 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://96ab57c0ead54e1fe3bbf9a7fe3fc8375d1e5ebd29840f01bb2022de2f3bfdc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://373aa4d8084c958728b4a3f16056068fb328a59e55a5f50785647d78fac0e424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:41Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:41 crc kubenswrapper[4730]: I0930 09:49:41.879349 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-p4xvk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9871bed2-69f9-44f1-ab80-f8b4b9241e73\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84aa64a838e41f5db6010538528a9d8d9221b5fe859e1afec9d9d3c71ef90eb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://84aa64a838e41f5db6010538528a9d8d9221b5fe859e1afec9d9d3c71ef90eb7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://618950d5ff8ec1ef34dc0f43e73a265aefbeb97f69a25226fb2007b4d28249a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://618950d5ff8ec1ef34dc0f43e73a265aefbeb97f69a25226fb2007b4d28249a7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://849eb24b3a7e62a7667c303069953f1391cdebf8f2888dc7d3967667c31c2a3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://849eb24b3a7e62a7667c303069953f1391cdebf8f2888dc7d3967667c31c2a3e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/
cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-p4xvk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:41Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:41 crc kubenswrapper[4730]: I0930 09:49:41.893493 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:41Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:41 crc kubenswrapper[4730]: I0930 09:49:41.906379 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e32477c6ee071d9e994825da575711cc75f177c998dd5fd170be07551b40e049\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:41Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:41 crc kubenswrapper[4730]: I0930 09:49:41.934948 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"95bd4436-8399-478d-9552-c9ba5ae8f327\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b7761b460113a85528a11a7475d5245ce3009a0dd413a8deec822c6c0eac07b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9d975\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://defac85c6bf34a3ea262b1ad293516b72b27c3f3e328f3f970694bb978bf90a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9d975\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d4zf9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:41Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:41 crc kubenswrapper[4730]: I0930 09:49:41.974127 4730 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-nw55k" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c0a99445-b60e-4c47-b4ae-00b9983d8a15\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b566e571569572866ddcfc76684ec3f1fdabc0f890aa5fd8588d136b890e88ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qdpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:40Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nw55k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:41Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:42 crc kubenswrapper[4730]: I0930 09:49:42.024052 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"14260296-7de2-4c79-8afc-9472b7132a27\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac2d1a0a531df208cd612ed7da358209e05fb5a21f658ca2d1ab62720db094e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9d7ccc34d02e190d685a52bfa797834b585dfe919856b9af92419fa4e6e3c8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e42db724f492a056ad52bf50f43c9603664c4307a8f0c01d2d83d36f644c134c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5305f42ae8fc9005ec4726813a881f7b86ab167dd060238bbde4c9af5eb2c010\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://009056b5d735b1cd232be92cb1bfae18ea1e4e6f5b06185d424bba38ef57ade7\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T09:49:30Z\\\",\\\"message\\\":\\\"W0930 09:49:19.606005 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0930 09:49:19.606415 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759225759 cert, and key in /tmp/serving-cert-1864340106/serving-signer.crt, /tmp/serving-cert-1864340106/serving-signer.key\\\\nI0930 09:49:19.940795 1 observer_polling.go:159] Starting file observer\\\\nW0930 09:49:19.943973 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0930 09:49:19.944252 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 09:49:19.946386 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1864340106/tls.crt::/tmp/serving-cert-1864340106/tls.key\\\\\\\"\\\\nF0930 09:49:30.331930 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2da24b18038fc740ead0fe99d18f91c87014548941763cf0ef15284ed5eb228\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://674fc8de60d8bca93dc0d54d5e12664780d58b8b4c5eaf314e923c2f0644e11b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminat
ed\\\":{\\\"containerID\\\":\\\"cri-o://674fc8de60d8bca93dc0d54d5e12664780d58b8b4c5eaf314e923c2f0644e11b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:16Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:42Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:42 crc kubenswrapper[4730]: I0930 09:49:42.058351 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"17c9fe87-51d8-4e94-b0aa-10f3112788c6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a687cd54db44118582a4e1b57fc64241860ae1cfdc202ffd5dccc517967e21f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://95fa75e3d43d0b4c4f3ddcb59fc3bd9fb3a75647e5fedd92463bdfaf6c3db722\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"
mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://758b57b21f203bcb20f4e3a3b20c6de8696ba56b4e0eff6a31faaf6879a3e7b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba42c83f0075e3d0f10f24307dd574c42a505351503a8794bbcee9e986ec80e3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:16Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:42Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:42 crc kubenswrapper[4730]: I0930 09:49:42.098049 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1917c40c0ff4b4b6a2389c465a4887bad0db0b5b86a8715e0c790b3e0acfb973\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:42Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:42 crc kubenswrapper[4730]: I0930 09:49:42.139565 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s64nf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a7e6b85-ac68-4da9-b7eb-b5a936f639df\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a77da47c8d44b3aae7f4e27fcc4df190549c2d881e0969dbe3b8951103477ba3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6rknv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s64nf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:42Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:42 crc kubenswrapper[4730]: I0930 09:49:42.230054 4730 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 09:49:42 crc kubenswrapper[4730]: I0930 09:49:42.232422 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:42 crc kubenswrapper[4730]: I0930 09:49:42.232474 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:42 crc kubenswrapper[4730]: I0930 09:49:42.232484 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:42 crc kubenswrapper[4730]: I0930 09:49:42.232604 4730 kubelet_node_status.go:76] "Attempting to register node" node="crc" Sep 30 09:49:42 crc kubenswrapper[4730]: I0930 09:49:42.239949 4730 kubelet_node_status.go:115] "Node was previously registered" node="crc" Sep 30 09:49:42 crc kubenswrapper[4730]: I0930 09:49:42.240228 4730 kubelet_node_status.go:79] "Successfully registered node" 
node="crc" Sep 30 09:49:42 crc kubenswrapper[4730]: I0930 09:49:42.241598 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:42 crc kubenswrapper[4730]: I0930 09:49:42.241806 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:42 crc kubenswrapper[4730]: I0930 09:49:42.241908 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:42 crc kubenswrapper[4730]: I0930 09:49:42.242057 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:42 crc kubenswrapper[4730]: I0930 09:49:42.242156 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:42Z","lastTransitionTime":"2025-09-30T09:49:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:49:42 crc kubenswrapper[4730]: E0930 09:49:42.256408 4730 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:49:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:49:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:42Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:49:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:49:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:42Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"106859cf-ef10-430a-91cd-145c67df2de1\\\",\\\"systemUUID\\\":\\\"07b44d08-082f-49ea-b265-a8fb7a484875\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:42Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:42 crc kubenswrapper[4730]: I0930 09:49:42.265723 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:42 crc kubenswrapper[4730]: I0930 09:49:42.265782 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 09:49:42 crc kubenswrapper[4730]: I0930 09:49:42.265799 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:42 crc kubenswrapper[4730]: I0930 09:49:42.265819 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:42 crc kubenswrapper[4730]: I0930 09:49:42.265832 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:42Z","lastTransitionTime":"2025-09-30T09:49:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:49:42 crc kubenswrapper[4730]: E0930 09:49:42.279795 4730 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:49:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:49:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:42Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:49:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:49:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:42Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"106859cf-ef10-430a-91cd-145c67df2de1\\\",\\\"systemUUID\\\":\\\"07b44d08-082f-49ea-b265-a8fb7a484875\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:42Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:42 crc kubenswrapper[4730]: I0930 09:49:42.284563 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:42 crc kubenswrapper[4730]: I0930 09:49:42.284639 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 09:49:42 crc kubenswrapper[4730]: I0930 09:49:42.284656 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:42 crc kubenswrapper[4730]: I0930 09:49:42.284673 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:42 crc kubenswrapper[4730]: I0930 09:49:42.284686 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:42Z","lastTransitionTime":"2025-09-30T09:49:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:49:42 crc kubenswrapper[4730]: E0930 09:49:42.299799 4730 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:49:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:49:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:42Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:49:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:49:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:42Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"106859cf-ef10-430a-91cd-145c67df2de1\\\",\\\"systemUUID\\\":\\\"07b44d08-082f-49ea-b265-a8fb7a484875\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:42Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:42 crc kubenswrapper[4730]: I0930 09:49:42.305674 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:42 crc kubenswrapper[4730]: I0930 09:49:42.305949 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 09:49:42 crc kubenswrapper[4730]: I0930 09:49:42.305972 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:42 crc kubenswrapper[4730]: I0930 09:49:42.306004 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:42 crc kubenswrapper[4730]: I0930 09:49:42.306027 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:42Z","lastTransitionTime":"2025-09-30T09:49:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:49:42 crc kubenswrapper[4730]: E0930 09:49:42.322873 4730 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:49:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:49:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:42Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:49:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:49:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:42Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"106859cf-ef10-430a-91cd-145c67df2de1\\\",\\\"systemUUID\\\":\\\"07b44d08-082f-49ea-b265-a8fb7a484875\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:42Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:42 crc kubenswrapper[4730]: I0930 09:49:42.367155 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:42 crc kubenswrapper[4730]: I0930 09:49:42.367224 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 09:49:42 crc kubenswrapper[4730]: I0930 09:49:42.367238 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:42 crc kubenswrapper[4730]: I0930 09:49:42.367263 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:42 crc kubenswrapper[4730]: I0930 09:49:42.367283 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:42Z","lastTransitionTime":"2025-09-30T09:49:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:49:42 crc kubenswrapper[4730]: I0930 09:49:42.380817 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 09:49:42 crc kubenswrapper[4730]: E0930 09:49:42.380988 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 09:49:42 crc kubenswrapper[4730]: I0930 09:49:42.381109 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 09:49:42 crc kubenswrapper[4730]: E0930 09:49:42.381331 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 09:49:42 crc kubenswrapper[4730]: I0930 09:49:42.381133 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 09:49:42 crc kubenswrapper[4730]: E0930 09:49:42.381603 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 09:49:42 crc kubenswrapper[4730]: E0930 09:49:42.382074 4730 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:49:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:49:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:42Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:49:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:49:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:42Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"106859cf-ef10-430a-91cd-145c67df2de1\\\",\\\"systemUUID\\\":\\\"07b44d08-082f-49ea-b265-a8fb7a484875\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:42Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:42 crc kubenswrapper[4730]: E0930 09:49:42.382224 4730 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Sep 30 09:49:42 crc kubenswrapper[4730]: I0930 09:49:42.384349 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Sep 30 09:49:42 crc kubenswrapper[4730]: I0930 09:49:42.384459 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:42 crc kubenswrapper[4730]: I0930 09:49:42.384555 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:42 crc kubenswrapper[4730]: I0930 09:49:42.384683 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:42 crc kubenswrapper[4730]: I0930 09:49:42.384762 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:42Z","lastTransitionTime":"2025-09-30T09:49:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:49:42 crc kubenswrapper[4730]: I0930 09:49:42.489825 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:42 crc kubenswrapper[4730]: I0930 09:49:42.490291 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:42 crc kubenswrapper[4730]: I0930 09:49:42.490307 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:42 crc kubenswrapper[4730]: I0930 09:49:42.490326 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:42 crc kubenswrapper[4730]: I0930 09:49:42.490339 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:42Z","lastTransitionTime":"2025-09-30T09:49:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:49:42 crc kubenswrapper[4730]: I0930 09:49:42.589437 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" event={"ID":"823c4c28-801d-421e-b15f-02a17e300753","Type":"ContainerStarted","Data":"59538b49ec4037e4bd1ccb035a0263f32db2504037aca2ab484dc9323f80b5d2"} Sep 30 09:49:42 crc kubenswrapper[4730]: I0930 09:49:42.592432 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:42 crc kubenswrapper[4730]: I0930 09:49:42.592472 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:42 crc kubenswrapper[4730]: I0930 09:49:42.592483 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:42 crc kubenswrapper[4730]: I0930 09:49:42.592499 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:42 crc kubenswrapper[4730]: I0930 09:49:42.592509 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:42Z","lastTransitionTime":"2025-09-30T09:49:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:49:42 crc kubenswrapper[4730]: I0930 09:49:42.593412 4730 generic.go:334] "Generic (PLEG): container finished" podID="9871bed2-69f9-44f1-ab80-f8b4b9241e73" containerID="685162f0c76d93c0aa78dc2c5c0ee794627f665afa60af9d9d0ae3ba8f5f7b59" exitCode=0 Sep 30 09:49:42 crc kubenswrapper[4730]: I0930 09:49:42.593967 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-p4xvk" event={"ID":"9871bed2-69f9-44f1-ab80-f8b4b9241e73","Type":"ContainerDied","Data":"685162f0c76d93c0aa78dc2c5c0ee794627f665afa60af9d9d0ae3ba8f5f7b59"} Sep 30 09:49:42 crc kubenswrapper[4730]: I0930 09:49:42.622218 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be 
located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:42Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:42 crc kubenswrapper[4730]: I0930 09:49:42.642640 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://96ab57c0ead54e1fe3bbf9a7fe3fc8375d1e5ebd29840f01bb2022de2f3bfdc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://373aa4d8084c958728b4a3f16056068fb328a59e55a5f50785647d78fac0e424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name
\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:42Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:42 crc kubenswrapper[4730]: I0930 09:49:42.659900 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-p4xvk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9871bed2-69f9-44f1-ab80-f8b4b9241e73\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84aa64a838e41f5db6010538528a9d8d9221b5fe859e1afec9d9d3c71ef90eb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://84aa64a838e41f5db6010538528a9d8d9221b5fe859e1afec9d9d3c71ef90eb7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://618950d5ff8ec1ef34dc0f43e73a265aefbeb97f69a25226fb2007b4d28249a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://618950d5ff8ec1ef34dc0f43e73a265aefbeb97f69a25226fb2007b4d28249a7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://849eb24b3a7e62a7667c303069953f1391cdebf8f2888dc7d3967667c31c2a3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://849eb24b3a7e62a7667c303069953f1391cdebf8f2888dc7d3967667c31c2a3e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://685162f0c76d93c0aa78dc2c5c0ee794627f665afa60af9d9d0ae3ba8f5f7b59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://685162f0c76d93c0aa78dc2c5c0ee794627f665afa60af9d9d0ae3ba8f5f7b59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disa
bled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-p4xvk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:42Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:42 crc kubenswrapper[4730]: I0930 09:49:42.678212 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"14260296-7de2-4c79-8afc-9472b7132a27\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac2d1a0a531df208cd612ed7da358209e05fb5a21f658ca2d1ab62720db094e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9d7ccc34d02e190d685a52bfa797834b585dfe919856b9af92419fa4e6e3c8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-k
ube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e42db724f492a056ad52bf50f43c9603664c4307a8f0c01d2d83d36f644c134c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5305f42ae8fc9005ec4726813a881f7b86ab167dd060238bbde4c9af5eb2c010\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://009056b5d735b1cd232be92cb1bfae18ea1e4e6f5b06185d424bba38ef57ade7\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T09:49:30Z\\\",\\\"message\\\":\\\"W0930 09:49:19.606005 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0930 09:49:19.606415 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759225759 cert, and key in /tmp/serving-cert-1864340106/serving-signer.crt, /tmp/serving-cert-1864340106/serving-signer.key\\\\nI0930 09:49:19.940795 1 observer_polling.go:159] Starting file observer\\\\nW0930 09:49:19.943973 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0930 09:49:19.944252 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 09:49:19.946386 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1864340106/tls.crt::/tmp/serving-cert-1864340106/tls.key\\\\\\\"\\\\nF0930 09:49:30.331930 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2da24b18038fc740ead0fe99d18f91c87014548941763cf0ef15284ed5eb228\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://674fc8de60d8bca93dc0d54d5e12664780d58b8b4c5eaf314e923c2f0644e11b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://674fc8de60d8bca93dc0d54d5e12664780d58b8b4c5eaf314e923c2f0644e11b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:16Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:42Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:42 crc kubenswrapper[4730]: I0930 09:49:42.692929 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"17c9fe87-51d8-4e94-b0aa-10f3112788c6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a687cd54db44118582a4e1b57fc64241860ae1cfdc202ffd5dccc517967e21f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://95fa75e3d43d0b4c4f3ddcb59fc3bd9fb3a75647e5fedd92463bdfaf6c3db722\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://758b57b21f203bcb20f4e3a3b20c6de8696ba56b4e0eff6a31faaf6879a3e7b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba42c83f0075e3d0f10f24307dd574c42a505351503a8794bbcee9e986ec80e3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:16Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:42Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:42 crc kubenswrapper[4730]: I0930 09:49:42.694845 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:42 crc kubenswrapper[4730]: I0930 09:49:42.694894 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:42 crc kubenswrapper[4730]: I0930 09:49:42.694909 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:42 crc kubenswrapper[4730]: I0930 09:49:42.694932 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:42 crc kubenswrapper[4730]: I0930 09:49:42.694946 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:42Z","lastTransitionTime":"2025-09-30T09:49:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:49:42 crc kubenswrapper[4730]: I0930 09:49:42.708120 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1917c40c0ff4b4b6a2389c465a4887bad0db0b5b86a8715e0c790b3e0acfb973\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:42Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:42 crc kubenswrapper[4730]: I0930 09:49:42.724174 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e32477c6ee071d9e994825da575711cc75f177c998dd5fd170be07551b40e049\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:42Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:42 crc kubenswrapper[4730]: I0930 09:49:42.741934 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"95bd4436-8399-478d-9552-c9ba5ae8f327\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b7761b460113a85528a11a7475d5245ce3009a0dd413a8deec822c6c0eac07b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9d975\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://defac85c6bf34a3ea262b1ad293516b72b27c3f3e328f3f970694bb978bf90a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9d975\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d4zf9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:42Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:42 crc kubenswrapper[4730]: I0930 09:49:42.759090 4730 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-nw55k" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c0a99445-b60e-4c47-b4ae-00b9983d8a15\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b566e571569572866ddcfc76684ec3f1fdabc0f890aa5fd8588d136b890e88ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qdpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:40Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nw55k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:42Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:42 crc kubenswrapper[4730]: I0930 09:49:42.775535 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s64nf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a7e6b85-ac68-4da9-b7eb-b5a936f639df\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a77da47c8d44b3aae7f4e27fcc4df190549c2d881e0969dbe3b8951103477ba3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6rknv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s64nf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:42Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:42 crc kubenswrapper[4730]: I0930 09:49:42.791745 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:42Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:42 crc kubenswrapper[4730]: I0930 09:49:42.797329 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:42 crc kubenswrapper[4730]: I0930 09:49:42.797366 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:42 crc kubenswrapper[4730]: I0930 09:49:42.797377 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:42 crc kubenswrapper[4730]: I0930 09:49:42.797397 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:42 crc kubenswrapper[4730]: I0930 09:49:42.797411 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:42Z","lastTransitionTime":"2025-09-30T09:49:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:49:42 crc kubenswrapper[4730]: I0930 09:49:42.807924 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:42Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:42 crc kubenswrapper[4730]: I0930 09:49:42.823780 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-t2frc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"98a6f8df-1ac8-4652-8074-90cb180311ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca62256374b9da6cb7b82db90ee6d121b072440c000bc7dfb04e763224cf666d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-txcfr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-t2frc\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:42Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:42 crc kubenswrapper[4730]: I0930 09:49:42.854029 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"823c4c28-801d-421e-b15f-02a17e300753\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release
-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\"
,\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\
\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4fcd3f6fd645e486e741ae4ddea7a8669c849d729860c651131da25638a393f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4fcd3f6fd645e486e741ae4ddea7a8669c849d729860c651131da25638a393f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-c5vmh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:42Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:42 crc kubenswrapper[4730]: I0930 09:49:42.900783 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:42 crc kubenswrapper[4730]: I0930 09:49:42.900843 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:42 crc kubenswrapper[4730]: I0930 09:49:42.900857 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:42 crc kubenswrapper[4730]: I0930 09:49:42.900880 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:42 crc kubenswrapper[4730]: I0930 09:49:42.900894 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:42Z","lastTransitionTime":"2025-09-30T09:49:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:49:43 crc kubenswrapper[4730]: I0930 09:49:43.003226 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:43 crc kubenswrapper[4730]: I0930 09:49:43.003816 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:43 crc kubenswrapper[4730]: I0930 09:49:43.003837 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:43 crc kubenswrapper[4730]: I0930 09:49:43.003860 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:43 crc kubenswrapper[4730]: I0930 09:49:43.003872 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:43Z","lastTransitionTime":"2025-09-30T09:49:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:49:43 crc kubenswrapper[4730]: I0930 09:49:43.106721 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:43 crc kubenswrapper[4730]: I0930 09:49:43.106783 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:43 crc kubenswrapper[4730]: I0930 09:49:43.106796 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:43 crc kubenswrapper[4730]: I0930 09:49:43.106816 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:43 crc kubenswrapper[4730]: I0930 09:49:43.106830 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:43Z","lastTransitionTime":"2025-09-30T09:49:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:49:43 crc kubenswrapper[4730]: I0930 09:49:43.209465 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:43 crc kubenswrapper[4730]: I0930 09:49:43.209512 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:43 crc kubenswrapper[4730]: I0930 09:49:43.209529 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:43 crc kubenswrapper[4730]: I0930 09:49:43.209546 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:43 crc kubenswrapper[4730]: I0930 09:49:43.209558 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:43Z","lastTransitionTime":"2025-09-30T09:49:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:49:43 crc kubenswrapper[4730]: I0930 09:49:43.313176 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:43 crc kubenswrapper[4730]: I0930 09:49:43.313213 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:43 crc kubenswrapper[4730]: I0930 09:49:43.313225 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:43 crc kubenswrapper[4730]: I0930 09:49:43.313243 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:43 crc kubenswrapper[4730]: I0930 09:49:43.313257 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:43Z","lastTransitionTime":"2025-09-30T09:49:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:49:43 crc kubenswrapper[4730]: I0930 09:49:43.416600 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:43 crc kubenswrapper[4730]: I0930 09:49:43.416674 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:43 crc kubenswrapper[4730]: I0930 09:49:43.416685 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:43 crc kubenswrapper[4730]: I0930 09:49:43.416703 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:43 crc kubenswrapper[4730]: I0930 09:49:43.416714 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:43Z","lastTransitionTime":"2025-09-30T09:49:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:49:43 crc kubenswrapper[4730]: I0930 09:49:43.520408 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:43 crc kubenswrapper[4730]: I0930 09:49:43.520460 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:43 crc kubenswrapper[4730]: I0930 09:49:43.520470 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:43 crc kubenswrapper[4730]: I0930 09:49:43.520487 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:43 crc kubenswrapper[4730]: I0930 09:49:43.520497 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:43Z","lastTransitionTime":"2025-09-30T09:49:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:49:43 crc kubenswrapper[4730]: I0930 09:49:43.602045 4730 generic.go:334] "Generic (PLEG): container finished" podID="9871bed2-69f9-44f1-ab80-f8b4b9241e73" containerID="3c92fbb212b1ad8be488e639f7f88263b3d96ab0320da532d908b3c9df59ef2d" exitCode=0 Sep 30 09:49:43 crc kubenswrapper[4730]: I0930 09:49:43.602119 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-p4xvk" event={"ID":"9871bed2-69f9-44f1-ab80-f8b4b9241e73","Type":"ContainerDied","Data":"3c92fbb212b1ad8be488e639f7f88263b3d96ab0320da532d908b3c9df59ef2d"} Sep 30 09:49:43 crc kubenswrapper[4730]: I0930 09:49:43.622554 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:43 crc kubenswrapper[4730]: I0930 09:49:43.622600 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:43 crc kubenswrapper[4730]: I0930 09:49:43.622629 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:43 crc kubenswrapper[4730]: I0930 09:49:43.622654 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:43 crc kubenswrapper[4730]: I0930 09:49:43.622673 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:43Z","lastTransitionTime":"2025-09-30T09:49:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:49:43 crc kubenswrapper[4730]: I0930 09:49:43.626186 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://96ab57c0ead54e1fe3bbf9a7fe3fc8375d1e5ebd29840f01bb2022de2f3bfdc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://373aa4d8084c958728b4a3f16056068fb328a59e55a5f50785647d78fac0e424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:43Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:43 crc kubenswrapper[4730]: I0930 09:49:43.641780 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-p4xvk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9871bed2-69f9-44f1-ab80-f8b4b9241e73\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84aa64a838e41f5db6010538528a9d8d9221b5fe859e1afec9d9d3c71ef90eb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://84aa64a838e41f5db6010538528a9d8d9221b5fe859e1afec9d9d3c71ef90eb7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://618950d5ff8ec1ef34dc0f43e73a265aefbeb97f69a25226fb2007b4d28249a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:
687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://618950d5ff8ec1ef34dc0f43e73a265aefbeb97f69a25226fb2007b4d28249a7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://849eb24b3a7e62a7667c303069953f1391cdebf8f2888dc7d3967667c31c2a3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://849eb24b3a7e62a7667c303069953f1391cdebf8f2888dc7d3967667c31c2a3e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://685162f0c76d93c0aa78dc2c5c0ee794627f665afa60af9d9d0ae3ba8f5f7b59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://685162f0c76d93c0aa78dc2c5c0ee794627f665afa60af9d9d0ae3ba8f5f7b59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mo
untPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c92fbb212b1ad8be488e639f7f88263b3d96ab0320da532d908b3c9df59ef2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c92fbb212b1ad8be488e639f7f88263b3d96ab0320da532d908b3c9df59ef2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-p4xvk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:43Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:43 crc kubenswrapper[4730]: I0930 09:49:43.654484 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:43Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:43 crc kubenswrapper[4730]: I0930 09:49:43.665447 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e32477c6ee071d9e994825da575711cc75f177c998dd5fd170be07551b40e049\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:43Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:43 crc kubenswrapper[4730]: I0930 09:49:43.675955 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"95bd4436-8399-478d-9552-c9ba5ae8f327\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b7761b460113a85528a11a7475d5245ce3009a0dd413a8deec822c6c0eac07b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9d975\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://defac85c6bf34a3ea262b1ad293516b72b27c3f3e328f3f970694bb978bf90a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9d975\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d4zf9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:43Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:43 crc kubenswrapper[4730]: I0930 09:49:43.687268 4730 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-nw55k" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c0a99445-b60e-4c47-b4ae-00b9983d8a15\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b566e571569572866ddcfc76684ec3f1fdabc0f890aa5fd8588d136b890e88ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qdpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:40Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nw55k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:43Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:43 crc kubenswrapper[4730]: I0930 09:49:43.702305 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"14260296-7de2-4c79-8afc-9472b7132a27\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac2d1a0a531df208cd612ed7da358209e05fb5a21f658ca2d1ab62720db094e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9d7ccc34d02e190d685a52bfa797834b585dfe919856b9af92419fa4e6e3c8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e42db724f492a056ad52bf50f43c9603664c4307a8f0c01d2d83d36f644c134c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5305f42ae8fc9005ec4726813a881f7b86ab167dd060238bbde4c9af5eb2c010\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://009056b5d735b1cd232be92cb1bfae18ea1e4e6f5b06185d424bba38ef57ade7\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T09:49:30Z\\\",\\\"message\\\":\\\"W0930 09:49:19.606005 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0930 09:49:19.606415 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759225759 cert, and key in /tmp/serving-cert-1864340106/serving-signer.crt, /tmp/serving-cert-1864340106/serving-signer.key\\\\nI0930 09:49:19.940795 1 observer_polling.go:159] Starting file observer\\\\nW0930 09:49:19.943973 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0930 09:49:19.944252 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 09:49:19.946386 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1864340106/tls.crt::/tmp/serving-cert-1864340106/tls.key\\\\\\\"\\\\nF0930 09:49:30.331930 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2da24b18038fc740ead0fe99d18f91c87014548941763cf0ef15284ed5eb228\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://674fc8de60d8bca93dc0d54d5e12664780d58b8b4c5eaf314e923c2f0644e11b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminat
ed\\\":{\\\"containerID\\\":\\\"cri-o://674fc8de60d8bca93dc0d54d5e12664780d58b8b4c5eaf314e923c2f0644e11b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:16Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:43Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:43 crc kubenswrapper[4730]: I0930 09:49:43.716057 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"17c9fe87-51d8-4e94-b0aa-10f3112788c6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a687cd54db44118582a4e1b57fc64241860ae1cfdc202ffd5dccc517967e21f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://95fa75e3d43d0b4c4f3ddcb59fc3bd9fb3a75647e5fedd92463bdfaf6c3db722\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"
mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://758b57b21f203bcb20f4e3a3b20c6de8696ba56b4e0eff6a31faaf6879a3e7b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba42c83f0075e3d0f10f24307dd574c42a505351503a8794bbcee9e986ec80e3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:16Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:43Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:43 crc kubenswrapper[4730]: I0930 09:49:43.724788 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:43 crc kubenswrapper[4730]: I0930 09:49:43.724831 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:43 crc kubenswrapper[4730]: I0930 09:49:43.724844 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:43 crc kubenswrapper[4730]: I0930 09:49:43.724862 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:43 crc kubenswrapper[4730]: I0930 09:49:43.724873 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:43Z","lastTransitionTime":"2025-09-30T09:49:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady 
message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:49:43 crc kubenswrapper[4730]: I0930 09:49:43.730334 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1917c40c0ff4b4b6a2389c465a4887bad0db0b5b86a8715e0c790b3e0acfb973\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:43Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:43 crc kubenswrapper[4730]: I0930 09:49:43.739859 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s64nf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a7e6b85-ac68-4da9-b7eb-b5a936f639df\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a77da47c8d44b3aae7f4e27fcc4df190549c2d881e0969dbe3b8951103477ba3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6rknv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s64nf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:43Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:43 crc kubenswrapper[4730]: I0930 09:49:43.753366 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-t2frc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"98a6f8df-1ac8-4652-8074-90cb180311ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca62256374b9da6cb7b82db90ee6d121b072440c000bc7dfb04e763224cf666d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-txcfr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-t2frc\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:43Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:43 crc kubenswrapper[4730]: I0930 09:49:43.770713 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"823c4c28-801d-421e-b15f-02a17e300753\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release
-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\"
,\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\
\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4fcd3f6fd645e486e741ae4ddea7a8669c849d729860c651131da25638a393f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4fcd3f6fd645e486e741ae4ddea7a8669c849d729860c651131da25638a393f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-c5vmh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:43Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:43 crc kubenswrapper[4730]: I0930 09:49:43.787575 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:43Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:43 crc kubenswrapper[4730]: I0930 09:49:43.807854 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:43Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:43 crc kubenswrapper[4730]: I0930 09:49:43.827969 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:43 crc kubenswrapper[4730]: I0930 09:49:43.828006 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:43 crc kubenswrapper[4730]: I0930 09:49:43.828015 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:43 crc kubenswrapper[4730]: I0930 09:49:43.828030 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:43 crc kubenswrapper[4730]: I0930 09:49:43.828040 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:43Z","lastTransitionTime":"2025-09-30T09:49:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:49:43 crc kubenswrapper[4730]: I0930 09:49:43.930883 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:43 crc kubenswrapper[4730]: I0930 09:49:43.930943 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:43 crc kubenswrapper[4730]: I0930 09:49:43.930954 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:43 crc kubenswrapper[4730]: I0930 09:49:43.930975 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:43 crc kubenswrapper[4730]: I0930 09:49:43.930985 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:43Z","lastTransitionTime":"2025-09-30T09:49:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:49:43 crc kubenswrapper[4730]: I0930 09:49:43.986309 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 09:49:43 crc kubenswrapper[4730]: E0930 09:49:43.986639 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 09:49:51.986566971 +0000 UTC m=+36.319826974 (durationBeforeRetry 8s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 09:49:43 crc kubenswrapper[4730]: I0930 09:49:43.986705 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 09:49:43 crc kubenswrapper[4730]: I0930 09:49:43.986745 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 09:49:43 crc kubenswrapper[4730]: E0930 09:49:43.986823 4730 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Sep 30 09:49:43 crc kubenswrapper[4730]: E0930 09:49:43.986901 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-30 09:49:51.986884729 +0000 UTC m=+36.320144732 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Sep 30 09:49:43 crc kubenswrapper[4730]: E0930 09:49:43.986901 4730 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 30 09:49:43 crc kubenswrapper[4730]: E0930 09:49:43.986952 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-30 09:49:51.986944531 +0000 UTC m=+36.320204534 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 30 09:49:44 crc kubenswrapper[4730]: I0930 09:49:44.037187 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:44 crc kubenswrapper[4730]: I0930 09:49:44.037243 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:44 crc kubenswrapper[4730]: I0930 09:49:44.037259 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:44 crc kubenswrapper[4730]: I0930 09:49:44.037277 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:44 crc kubenswrapper[4730]: I0930 09:49:44.037288 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:44Z","lastTransitionTime":"2025-09-30T09:49:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:49:44 crc kubenswrapper[4730]: I0930 09:49:44.088070 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 09:49:44 crc kubenswrapper[4730]: I0930 09:49:44.088146 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 09:49:44 crc kubenswrapper[4730]: E0930 09:49:44.088293 4730 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 30 09:49:44 crc kubenswrapper[4730]: E0930 09:49:44.088329 4730 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 30 09:49:44 crc kubenswrapper[4730]: E0930 09:49:44.088360 4730 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 09:49:44 crc kubenswrapper[4730]: E0930 09:49:44.088293 4730 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 30 09:49:44 crc kubenswrapper[4730]: E0930 09:49:44.088420 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-09-30 09:49:52.088399967 +0000 UTC m=+36.421659960 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 09:49:44 crc kubenswrapper[4730]: E0930 09:49:44.088424 4730 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 30 09:49:44 crc kubenswrapper[4730]: E0930 09:49:44.088440 4730 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 09:49:44 crc kubenswrapper[4730]: E0930 09:49:44.088475 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-09-30 09:49:52.088461629 +0000 UTC m=+36.421721622 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 09:49:44 crc kubenswrapper[4730]: I0930 09:49:44.139799 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:44 crc kubenswrapper[4730]: I0930 09:49:44.139864 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:44 crc kubenswrapper[4730]: I0930 09:49:44.139877 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:44 crc kubenswrapper[4730]: I0930 09:49:44.139896 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:44 crc kubenswrapper[4730]: I0930 09:49:44.139911 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:44Z","lastTransitionTime":"2025-09-30T09:49:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:49:44 crc kubenswrapper[4730]: I0930 09:49:44.242913 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:44 crc kubenswrapper[4730]: I0930 09:49:44.242994 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:44 crc kubenswrapper[4730]: I0930 09:49:44.243007 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:44 crc kubenswrapper[4730]: I0930 09:49:44.243027 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:44 crc kubenswrapper[4730]: I0930 09:49:44.243040 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:44Z","lastTransitionTime":"2025-09-30T09:49:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:49:44 crc kubenswrapper[4730]: I0930 09:49:44.346304 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:44 crc kubenswrapper[4730]: I0930 09:49:44.346364 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:44 crc kubenswrapper[4730]: I0930 09:49:44.346377 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:44 crc kubenswrapper[4730]: I0930 09:49:44.346398 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:44 crc kubenswrapper[4730]: I0930 09:49:44.346412 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:44Z","lastTransitionTime":"2025-09-30T09:49:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:49:44 crc kubenswrapper[4730]: I0930 09:49:44.380253 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 09:49:44 crc kubenswrapper[4730]: I0930 09:49:44.380332 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 09:49:44 crc kubenswrapper[4730]: I0930 09:49:44.380843 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 09:49:44 crc kubenswrapper[4730]: E0930 09:49:44.381027 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 09:49:44 crc kubenswrapper[4730]: E0930 09:49:44.381186 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 09:49:44 crc kubenswrapper[4730]: E0930 09:49:44.381315 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 09:49:44 crc kubenswrapper[4730]: I0930 09:49:44.450225 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:44 crc kubenswrapper[4730]: I0930 09:49:44.450286 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:44 crc kubenswrapper[4730]: I0930 09:49:44.450306 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:44 crc kubenswrapper[4730]: I0930 09:49:44.450334 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:44 crc kubenswrapper[4730]: I0930 09:49:44.450354 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:44Z","lastTransitionTime":"2025-09-30T09:49:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:49:44 crc kubenswrapper[4730]: I0930 09:49:44.554139 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:44 crc kubenswrapper[4730]: I0930 09:49:44.554188 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:44 crc kubenswrapper[4730]: I0930 09:49:44.554207 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:44 crc kubenswrapper[4730]: I0930 09:49:44.554231 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:44 crc kubenswrapper[4730]: I0930 09:49:44.554248 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:44Z","lastTransitionTime":"2025-09-30T09:49:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:49:44 crc kubenswrapper[4730]: I0930 09:49:44.609301 4730 generic.go:334] "Generic (PLEG): container finished" podID="9871bed2-69f9-44f1-ab80-f8b4b9241e73" containerID="840f2a769ccdfba8933c685d07c325cf84d38bccf1d317c3b664f298aa52d878" exitCode=0 Sep 30 09:49:44 crc kubenswrapper[4730]: I0930 09:49:44.609365 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-p4xvk" event={"ID":"9871bed2-69f9-44f1-ab80-f8b4b9241e73","Type":"ContainerDied","Data":"840f2a769ccdfba8933c685d07c325cf84d38bccf1d317c3b664f298aa52d878"} Sep 30 09:49:44 crc kubenswrapper[4730]: I0930 09:49:44.624812 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e32477c6ee071d9e994825da575711cc75f177c998dd5fd170be07551b40e049\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:44Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:44 crc kubenswrapper[4730]: I0930 09:49:44.638215 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"95bd4436-8399-478d-9552-c9ba5ae8f327\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b7761b460113a85528a11a7475d5245ce3009a0dd413a8deec822c6c0eac07b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9d975\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://defac85c6bf34a3ea262b1ad293516b72b27c3f3e328f3f970694bb978bf90a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9d975\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d4zf9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:44Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:44 crc kubenswrapper[4730]: I0930 09:49:44.651352 4730 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-nw55k" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c0a99445-b60e-4c47-b4ae-00b9983d8a15\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b566e571569572866ddcfc76684ec3f1fdabc0f890aa5fd8588d136b890e88ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qdpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:40Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nw55k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:44Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:44 crc kubenswrapper[4730]: I0930 09:49:44.656568 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:44 crc kubenswrapper[4730]: I0930 09:49:44.656656 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:44 crc kubenswrapper[4730]: I0930 09:49:44.656674 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:44 crc kubenswrapper[4730]: I0930 09:49:44.656696 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:44 crc kubenswrapper[4730]: I0930 09:49:44.656710 4730 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:44Z","lastTransitionTime":"2025-09-30T09:49:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:49:44 crc kubenswrapper[4730]: I0930 09:49:44.666075 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"14260296-7de2-4c79-8afc-9472b7132a27\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac2d1a0a531df208cd612ed7da358209e05fb5a21f658ca2d1ab62720db094e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9d7ccc34d02e190d685a52bfa797834b585dfe919856b9af92419fa4e6e3c8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e42db724f492a056ad52bf50f43c9603664c4307a8f0c01d2d83d36f644c134c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\
":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5305f42ae8fc9005ec4726813a881f7b86ab167dd060238bbde4c9af5eb2c010\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://009056b5d735b1cd232be92cb1bfae18ea1e4e6f5b06185d424bba38ef57ade7\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T09:49:30Z\\\",\\\"message\\\":\\\"W0930 09:49:19.606005 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0930 09:49:19.606415 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759225759 cert, and key in /tmp/serving-cert-1864340106/serving-signer.crt, /tmp/serving-cert-1864340106/serving-signer.key\\\\nI0930 09:49:19.940795 1 observer_polling.go:159] Starting file observer\\\\nW0930 09:49:19.943973 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0930 09:49:19.944252 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 09:49:19.946386 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1864340106/tls.crt::/tmp/serving-cert-1864340106/tls.key\\\\\\\"\\\\nF0930 09:49:30.331930 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2da24b18038fc740ead0fe99d18f91c87014548941763cf0ef15284ed5eb228\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://674fc8de60d8bca93dc0d54d5e12664780d58b8b4c5eaf314e923c2f0644e11b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://674fc8de60d8bca93dc0d54d5e12664780d58b8b4c5eaf314e923c2f0644e11b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:16Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:44Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:44 crc kubenswrapper[4730]: I0930 09:49:44.680471 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"17c9fe87-51d8-4e94-b0aa-10f3112788c6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a687cd54db44118582a4e1b57fc64241860ae1cfdc202ffd5dccc517967e21f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://95fa75e3d43d0b4c4f3ddcb59fc3bd9fb3a75647e5fedd92463bdfaf6c3db722\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://758b57b21f203bcb20f4e3a3b20c6de8696ba56b4e0eff6a31faaf6879a3e7b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba42c83f0075e3d0f10f24307dd574c42a505351503a8794bbcee9e986ec80e3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:16Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:44Z is after 2025-08-24T17:21:41Z"
Sep 30 09:49:44 crc kubenswrapper[4730]: I0930 09:49:44.694423 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1917c40c0ff4b4b6a2389c465a4887bad0db0b5b86a8715e0c790b3e0acfb973\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:44Z is after 2025-08-24T17:21:41Z"
Sep 30 09:49:44 crc kubenswrapper[4730]: I0930 09:49:44.706686 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s64nf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a7e6b85-ac68-4da9-b7eb-b5a936f639df\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a77da47c8d44b3aae7f4e27fcc4df190549c2d881e0969dbe3b8951103477ba3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6rknv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s64nf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:44Z is after 2025-08-24T17:21:41Z"
Sep 30 09:49:44 crc kubenswrapper[4730]: I0930 09:49:44.721682 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-t2frc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"98a6f8df-1ac8-4652-8074-90cb180311ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca62256374b9da6cb7b82db90ee6d121b072440c000bc7dfb04e763224cf666d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-txcfr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-t2frc\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:44Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:44 crc kubenswrapper[4730]: I0930 09:49:44.743833 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"823c4c28-801d-421e-b15f-02a17e300753\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release
-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\"
,\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\
\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4fcd3f6fd645e486e741ae4ddea7a8669c849d729860c651131da25638a393f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4fcd3f6fd645e486e741ae4ddea7a8669c849d729860c651131da25638a393f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-c5vmh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:44Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:44 crc kubenswrapper[4730]: I0930 09:49:44.760305 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:44Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:44 crc kubenswrapper[4730]: I0930 09:49:44.761428 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:44 crc kubenswrapper[4730]: I0930 09:49:44.761470 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:44 crc kubenswrapper[4730]: I0930 09:49:44.761480 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:44 crc kubenswrapper[4730]: I0930 09:49:44.761504 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:44 crc kubenswrapper[4730]: I0930 09:49:44.761516 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:44Z","lastTransitionTime":"2025-09-30T09:49:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:49:44 crc kubenswrapper[4730]: I0930 09:49:44.775268 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:44Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:44 crc kubenswrapper[4730]: I0930 09:49:44.789151 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://96ab57c0ead54e1fe3bbf9a7fe3fc8375d1e5ebd29840f01bb2022de2f3bfdc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://373aa4d8084c958728b4a3f16056068fb328a59e55a5f50785647d78fac0e424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:44Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:44 crc kubenswrapper[4730]: I0930 09:49:44.804461 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-p4xvk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9871bed2-69f9-44f1-ab80-f8b4b9241e73\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84aa64a838e41f5db6010538528a9d8d9221b5fe859e1afec9d9d3c71ef90eb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://84aa64a838e41f5db6010538528a9d8d9221b5fe859e1afec9d9d3c71ef90eb7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://618950d5ff8ec1ef34dc0f43e73a265aefbeb97f69a25226fb2007b4d28249a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://618950d5ff8ec1ef34dc0f43e73a265aefbeb97f69a25226fb2007b4d28249a7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://849eb24b3a7e62a7667c303069953f1391cdebf8f2888dc7d3967667c31c2a3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://849eb24b3a7e62a7667c303069953f1391cdebf8f2888dc7d3967667c31c2a3e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://685162f0c76d93c0aa78dc2c5c0ee794627f665afa60af9d9d0ae3ba8f5f7b59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://685162f0c76d93c0aa78dc2c5c0ee794627f665afa60af9d9d0ae3ba8f5f7b59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"D
isabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c92fbb212b1ad8be488e639f7f88263b3d96ab0320da532d908b3c9df59ef2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c92fbb212b1ad8be488e639f7f88263b3d96ab0320da532d908b3c9df59ef2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://840f2a769ccdfba8933c685d07c325cf84d38bccf1d317c3b664f298aa52d878\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://840f2a769ccdfba8933c685d07c325cf84d38bccf1d317c3b664f298aa52d878\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-p4xvk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:44Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:44 crc kubenswrapper[4730]: I0930 09:49:44.820220 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:44Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:44 crc kubenswrapper[4730]: I0930 09:49:44.864664 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:44 crc kubenswrapper[4730]: I0930 09:49:44.864731 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:44 crc kubenswrapper[4730]: I0930 09:49:44.864740 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:44 crc kubenswrapper[4730]: I0930 09:49:44.864757 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:44 crc kubenswrapper[4730]: I0930 09:49:44.864770 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:44Z","lastTransitionTime":"2025-09-30T09:49:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Sep 30 09:49:44 crc kubenswrapper[4730]: I0930 09:49:44.967649 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 09:49:44 crc kubenswrapper[4730]: I0930 09:49:44.967719 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 09:49:44 crc kubenswrapper[4730]: I0930 09:49:44.967733 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 09:49:44 crc kubenswrapper[4730]: I0930 09:49:44.967753 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 09:49:44 crc kubenswrapper[4730]: I0930 09:49:44.967765 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:44Z","lastTransitionTime":"2025-09-30T09:49:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 09:49:45 crc kubenswrapper[4730]: I0930 09:49:45.071506 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 09:49:45 crc kubenswrapper[4730]: I0930 09:49:45.071579 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 09:49:45 crc kubenswrapper[4730]: I0930 09:49:45.071595 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 09:49:45 crc kubenswrapper[4730]: I0930 09:49:45.071641 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 09:49:45 crc kubenswrapper[4730]: I0930 09:49:45.071659 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:45Z","lastTransitionTime":"2025-09-30T09:49:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 09:49:45 crc kubenswrapper[4730]: I0930 09:49:45.179457 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 09:49:45 crc kubenswrapper[4730]: I0930 09:49:45.179508 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 09:49:45 crc kubenswrapper[4730]: I0930 09:49:45.179521 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 09:49:45 crc kubenswrapper[4730]: I0930 09:49:45.179538 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 09:49:45 crc kubenswrapper[4730]: I0930 09:49:45.179552 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:45Z","lastTransitionTime":"2025-09-30T09:49:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 09:49:45 crc kubenswrapper[4730]: I0930 09:49:45.283764 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 09:49:45 crc kubenswrapper[4730]: I0930 09:49:45.283821 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 09:49:45 crc kubenswrapper[4730]: I0930 09:49:45.283840 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 09:49:45 crc kubenswrapper[4730]: I0930 09:49:45.283859 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 09:49:45 crc kubenswrapper[4730]: I0930 09:49:45.283873 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:45Z","lastTransitionTime":"2025-09-30T09:49:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 09:49:45 crc kubenswrapper[4730]: I0930 09:49:45.388487 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 09:49:45 crc kubenswrapper[4730]: I0930 09:49:45.389041 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 09:49:45 crc kubenswrapper[4730]: I0930 09:49:45.389057 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 09:49:45 crc kubenswrapper[4730]: I0930 09:49:45.389082 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 09:49:45 crc kubenswrapper[4730]: I0930 09:49:45.389097 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:45Z","lastTransitionTime":"2025-09-30T09:49:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 09:49:45 crc kubenswrapper[4730]: I0930 09:49:45.491851 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 09:49:45 crc kubenswrapper[4730]: I0930 09:49:45.491903 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 09:49:45 crc kubenswrapper[4730]: I0930 09:49:45.491916 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 09:49:45 crc kubenswrapper[4730]: I0930 09:49:45.491936 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 09:49:45 crc kubenswrapper[4730]: I0930 09:49:45.491953 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:45Z","lastTransitionTime":"2025-09-30T09:49:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 09:49:45 crc kubenswrapper[4730]: I0930 09:49:45.595024 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 09:49:45 crc kubenswrapper[4730]: I0930 09:49:45.595284 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 09:49:45 crc kubenswrapper[4730]: I0930 09:49:45.595304 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 09:49:45 crc kubenswrapper[4730]: I0930 09:49:45.595360 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 09:49:45 crc kubenswrapper[4730]: I0930 09:49:45.595387 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:45Z","lastTransitionTime":"2025-09-30T09:49:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 09:49:45 crc kubenswrapper[4730]: I0930 09:49:45.619525 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-p4xvk" event={"ID":"9871bed2-69f9-44f1-ab80-f8b4b9241e73","Type":"ContainerStarted","Data":"19b882d14f4eaee6c6ad8cdd30c6e1e63eb837c7bc2dd31974c37ffbc3307186"}
Sep 30 09:49:45 crc kubenswrapper[4730]: I0930 09:49:45.627259 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" event={"ID":"823c4c28-801d-421e-b15f-02a17e300753","Type":"ContainerStarted","Data":"dd35a16a93b6c97e4e999d719b5d4dff7fa58ab8e90128e418c26e8cb0752092"}
Sep 30 09:49:45 crc kubenswrapper[4730]: I0930 09:49:45.627964 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh"
Sep 30 09:49:45 crc kubenswrapper[4730]: I0930 09:49:45.628030 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh"
Sep 30 09:49:45 crc kubenswrapper[4730]: I0930 09:49:45.636592 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s64nf" err="failed to patch status
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a7e6b85-ac68-4da9-b7eb-b5a936f639df\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a77da47c8d44b3aae7f4e27fcc4df190549c2d881e0969dbe3b8951103477ba3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6rknv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s64nf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:45Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:45 crc kubenswrapper[4730]: I0930 09:49:45.653870 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:45Z is after 2025-08-24T17:21:41Z"
Sep 30 09:49:45 crc kubenswrapper[4730]: I0930 09:49:45.711905 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 09:49:45 crc kubenswrapper[4730]: I0930 09:49:45.712308 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 09:49:45 crc kubenswrapper[4730]: I0930 09:49:45.712450 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 09:49:45 crc kubenswrapper[4730]: I0930 09:49:45.712567 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 09:49:45 crc kubenswrapper[4730]: I0930 09:49:45.712716 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:45Z","lastTransitionTime":"2025-09-30T09:49:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?"} Sep 30 09:49:45 crc kubenswrapper[4730]: I0930 09:49:45.711938 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-t2frc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"98a6f8df-1ac8-4652-8074-90cb180311ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca62256374b9da6cb7b82db90ee6d121b072440c000bc7dfb04e763224cf666d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-txcfr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-t2frc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:45Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:45 crc kubenswrapper[4730]: I0930 09:49:45.717387 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" Sep 30 09:49:45 crc kubenswrapper[4730]: I0930 09:49:45.717702 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" Sep 30 09:49:45 crc kubenswrapper[4730]: I0930 09:49:45.736675 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"823c4c28-801d-421e-b15f-02a17e300753\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4fcd3f6fd645e486e741ae4ddea7a8669c849d729860c651131da25638a393f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4fcd3f6fd645e486e741ae4ddea7a8669c849d729860c651131da25638a393f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-c5vmh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:45Z 
is after 2025-08-24T17:21:41Z" Sep 30 09:49:45 crc kubenswrapper[4730]: I0930 09:49:45.755176 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:45Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:45 crc kubenswrapper[4730]: I0930 09:49:45.771201 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:45Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:45 crc kubenswrapper[4730]: I0930 09:49:45.784799 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://96ab57c0ead54e1fe3bbf9a7fe3fc8375d1e5ebd29840f01bb2022de2f3bfdc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://373aa4d8084c958728b4a3f16056068fb328a59e55a5f50785647d78fac0e424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imag
eID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:45Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:45 crc kubenswrapper[4730]: I0930 09:49:45.804261 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-p4xvk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9871bed2-69f9-44f1-ab80-f8b4b9241e73\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19b882d14f4eaee6c6ad8cdd30c6e1e63eb837c7bc2dd31974c37ffbc3307186\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84aa64a838e41f5db6010538528a9d8d9221b5fe859e1afec9d9d3c71ef90eb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\
":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://84aa64a838e41f5db6010538528a9d8d9221b5fe859e1afec9d9d3c71ef90eb7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://618950d5ff8ec1ef34dc0f43e73a265aefbeb97f69a25226fb2007b4d28249a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://618950d5ff8ec1ef34dc0f43e73a265aefbeb97f69a25226fb2007b4d28249a7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://849eb24b3a7e62a7667c303069953f1391cdebf8f2888dc7d3967667c31c2a3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://849eb24b3a7e62a7667c303069953f1391cdebf8f2888dc7d3967667c31c2a3e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\
\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://685162f0c76d93c0aa78dc2c5c0ee794627f665afa60af9d9d0ae3ba8f5f7b59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://685162f0c76d93c0aa78dc2c5c0ee794627f665afa60af9d9d0ae3ba8f5f7b59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c92fbb212b1ad8be488e639f7f88263b3d96ab0320da532d908b3c9df59ef2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c92fbb212b1ad8be488e639f7f88263b3d96ab0320da532d908b3c9df59ef2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://840f2a769ccdfba8933c685d07c325cf84d38bccf1d317c3b664f298aa52d878\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://840f2a769ccdfba8933c685d07c325cf84d38bccf1d317c3b664f298aa52d878\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:43
Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-p4xvk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:45Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:45 crc kubenswrapper[4730]: I0930 09:49:45.815710 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:45 crc kubenswrapper[4730]: I0930 09:49:45.815764 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:45 crc kubenswrapper[4730]: I0930 09:49:45.815776 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:45 crc kubenswrapper[4730]: I0930 09:49:45.815799 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:45 crc kubenswrapper[4730]: I0930 09:49:45.815818 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:45Z","lastTransitionTime":"2025-09-30T09:49:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:49:45 crc kubenswrapper[4730]: I0930 09:49:45.821007 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1917c40c0ff4b4b6a2389c465a4887bad0db0b5b86a8715e0c790b3e0acfb973\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:45Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:45 crc kubenswrapper[4730]: I0930 09:49:45.833566 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e32477c6ee071d9e994825da575711cc75f177c998dd5fd170be07551b40e049\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:45Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:45 crc kubenswrapper[4730]: I0930 09:49:45.846976 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"95bd4436-8399-478d-9552-c9ba5ae8f327\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b7761b460113a85528a11a7475d5245ce3009a0dd413a8deec822c6c0eac07b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9d975\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://defac85c6bf34a3ea262b1ad293516b72b27c3f3e328f3f970694bb978bf90a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9d975\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d4zf9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:45Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:45 crc kubenswrapper[4730]: I0930 09:49:45.858313 4730 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-nw55k" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c0a99445-b60e-4c47-b4ae-00b9983d8a15\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b566e571569572866ddcfc76684ec3f1fdabc0f890aa5fd8588d136b890e88ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qdpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:40Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nw55k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:45Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:45 crc kubenswrapper[4730]: I0930 09:49:45.873406 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"14260296-7de2-4c79-8afc-9472b7132a27\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac2d1a0a531df208cd612ed7da358209e05fb5a21f658ca2d1ab62720db094e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9d7ccc34d02e190d685a52bfa797834b585dfe919856b9af92419fa4e6e3c8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e42db724f492a056ad52bf50f43c9603664c4307a8f0c01d2d83d36f644c134c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5305f42ae8fc9005ec4726813a881f7b86ab167dd060238bbde4c9af5eb2c010\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://009056b5d735b1cd232be92cb1bfae18ea1e4e6f5b06185d424bba38ef57ade7\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T09:49:30Z\\\",\\\"message\\\":\\\"W0930 09:49:19.606005 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0930 09:49:19.606415 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759225759 cert, and key in /tmp/serving-cert-1864340106/serving-signer.crt, /tmp/serving-cert-1864340106/serving-signer.key\\\\nI0930 09:49:19.940795 1 observer_polling.go:159] Starting file observer\\\\nW0930 09:49:19.943973 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0930 09:49:19.944252 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 09:49:19.946386 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1864340106/tls.crt::/tmp/serving-cert-1864340106/tls.key\\\\\\\"\\\\nF0930 09:49:30.331930 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2da24b18038fc740ead0fe99d18f91c87014548941763cf0ef15284ed5eb228\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://674fc8de60d8bca93dc0d54d5e12664780d58b8b4c5eaf314e923c2f0644e11b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminat
ed\\\":{\\\"containerID\\\":\\\"cri-o://674fc8de60d8bca93dc0d54d5e12664780d58b8b4c5eaf314e923c2f0644e11b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:16Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:45Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:45 crc kubenswrapper[4730]: I0930 09:49:45.888219 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"17c9fe87-51d8-4e94-b0aa-10f3112788c6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a687cd54db44118582a4e1b57fc64241860ae1cfdc202ffd5dccc517967e21f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://95fa75e3d43d0b4c4f3ddcb59fc3bd9fb3a75647e5fedd92463bdfaf6c3db722\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"
mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://758b57b21f203bcb20f4e3a3b20c6de8696ba56b4e0eff6a31faaf6879a3e7b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba42c83f0075e3d0f10f24307dd574c42a505351503a8794bbcee9e986ec80e3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:16Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:45Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:45 crc kubenswrapper[4730]: I0930 09:49:45.901128 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:45Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:45 crc kubenswrapper[4730]: I0930 09:49:45.916014 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://96ab57c0ead54e1fe3bbf9a7fe3fc8375d1e5ebd29840f01bb2022de2f3bfdc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://373aa4d8084c958728b4a3f16056068fb328a59e55a5f50785647d78fac0e424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imag
eID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:45Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:45 crc kubenswrapper[4730]: I0930 09:49:45.918515 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:45 crc kubenswrapper[4730]: I0930 09:49:45.918570 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:45 crc kubenswrapper[4730]: I0930 09:49:45.918583 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:45 crc kubenswrapper[4730]: I0930 09:49:45.918624 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:45 crc kubenswrapper[4730]: I0930 09:49:45.918637 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:45Z","lastTransitionTime":"2025-09-30T09:49:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:49:45 crc kubenswrapper[4730]: I0930 09:49:45.930174 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-p4xvk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9871bed2-69f9-44f1-ab80-f8b4b9241e73\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19b882d14f4eaee6c6ad8cdd30c6e1e63eb837c7bc2dd31974c37ffbc3307186\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84aa64a838e41f5db6010538528a9d8d9221b5fe859e1afec9d9d3c71ef90eb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://84aa64a838e41f5db6010538528a9d8d9221b5fe859e1afec9d9d3c71ef90eb7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://618950d5ff8ec1ef34dc0f43e73a265aefbeb97f69a25226fb2007b4d28249a7\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://618950d5ff8ec1ef34dc0f43e73a265aefbeb97f69a25226fb2007b4d28249a7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://849eb24b3a7e62a7667c303069953f1391cdebf8f2888dc7d3967667c31c2a3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://849eb24b3a7e62a7667c303069953f1391cdebf8f2888dc7d3967667c31c2a3e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://685162f0c76d93c0aa78dc2c5c0ee794627f665afa60af9d9d0ae3ba8f5f7b59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://685162f0c76d93c0aa78dc2c5c0ee794627f665afa60af9d9d0ae3ba8f5f7b59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c92fbb212b1ad8be488e639f7f88263b3d96ab0320da532d908b3c9df59ef2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c92fbb212b1ad8be488e639f7f88263b3d96ab0320da532d908b3c9df59ef2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://840f2a769ccdfba8933c685d07c325cf84d38bccf1d317c3b664f298aa52d878\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://840f2a769ccdfba8933c685d07c325cf84d38bccf1d317c3b664f298aa52d878\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-p4xvk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:45Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:45 crc kubenswrapper[4730]: I0930 09:49:45.945977 4730 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"14260296-7de2-4c79-8afc-9472b7132a27\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac2d1a0a531df208cd612ed7da358209e05fb5a21f658ca2d1ab62720db094e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9d7ccc34d02e190d685a52bfa797834b585dfe919856b9af92419fa4e6e3c8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e42db724f492a056ad52bf50f43c9603664c4307a8f0c01d2d83d36f644c134c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5305f42ae8fc9005ec4726813a881f7b86ab167dd060238bbde4c9af5eb2c010\\
\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://009056b5d735b1cd232be92cb1bfae18ea1e4e6f5b06185d424bba38ef57ade7\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T09:49:30Z\\\",\\\"message\\\":\\\"W0930 09:49:19.606005 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0930 09:49:19.606415 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759225759 cert, and key in /tmp/serving-cert-1864340106/serving-signer.crt, /tmp/serving-cert-1864340106/serving-signer.key\\\\nI0930 09:49:19.940795 1 observer_polling.go:159] Starting file observer\\\\nW0930 09:49:19.943973 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0930 09:49:19.944252 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 09:49:19.946386 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1864340106/tls.crt::/tmp/serving-cert-1864340106/tls.key\\\\\\\"\\\\nF0930 09:49:30.331930 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2da24b18038fc740ead0fe99d18f91c87014548941763cf0ef15284ed5eb228\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://674fc8de60d8bca93dc0d54d5e12664780d58b8b4c5eaf314e923c2f0644e11b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://674fc8de60d8bca93dc0d54d5e12664780d58b8b4c5eaf314e923c2f0644e11b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:16Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:45Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:45 crc kubenswrapper[4730]: I0930 09:49:45.960371 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"17c9fe87-51d8-4e94-b0aa-10f3112788c6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a687cd54db44118582a4e1b57fc64241860ae1cfdc202ffd5dccc517967e21f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://95fa75e3d43d0b4c4f3ddcb59fc3bd9fb3a75647e5fedd92463bdfaf6c3db722\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://758b57b21f203bcb20f4e3a3b20c6de8696ba56b4e0eff6a31faaf6879a3e7b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba42c83f0075e3d0f10f24307dd574c42a505351503a8794bbcee9e986ec80e3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:16Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:45Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:45 crc kubenswrapper[4730]: I0930 09:49:45.974414 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1917c40c0ff4b4b6a2389c465a4887bad0db0b5b86a8715e0c790b3e0acfb973\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:45Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:45 crc kubenswrapper[4730]: I0930 09:49:45.986770 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e32477c6ee071d9e994825da575711cc75f177c998dd5fd170be07551b40e049\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:45Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:46 crc kubenswrapper[4730]: I0930 09:49:46.001390 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"95bd4436-8399-478d-9552-c9ba5ae8f327\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b7761b460113a85528a11a7475d5245ce3009a0dd413a8deec822c6c0eac07b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9d975\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://defac85c6bf34a3ea262b1ad293516b72b27c3f3e328f3f970694bb978bf90a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9d975\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d4zf9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:45Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:46 crc kubenswrapper[4730]: I0930 09:49:46.011606 4730 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-nw55k" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c0a99445-b60e-4c47-b4ae-00b9983d8a15\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b566e571569572866ddcfc76684ec3f1fdabc0f890aa5fd8588d136b890e88ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qdpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:40Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nw55k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:46Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:46 crc kubenswrapper[4730]: I0930 09:49:46.021893 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s64nf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a7e6b85-ac68-4da9-b7eb-b5a936f639df\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a77da47c8d44b3aae7f4e27fcc4df190549c2d881e0969dbe3b8951103477ba3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6rknv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s64nf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:46Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:46 crc kubenswrapper[4730]: I0930 09:49:46.022125 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:46 crc kubenswrapper[4730]: I0930 09:49:46.022364 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:46 crc kubenswrapper[4730]: I0930 09:49:46.022377 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:46 crc kubenswrapper[4730]: I0930 09:49:46.022397 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:46 crc kubenswrapper[4730]: I0930 09:49:46.022411 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:46Z","lastTransitionTime":"2025-09-30T09:49:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:49:46 crc kubenswrapper[4730]: I0930 09:49:46.036968 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:46Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:46 crc kubenswrapper[4730]: I0930 09:49:46.051036 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:46Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:46 crc kubenswrapper[4730]: I0930 09:49:46.064053 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-t2frc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"98a6f8df-1ac8-4652-8074-90cb180311ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca62256374b9da6cb7b82db90ee6d121b072440c000bc7dfb04e763224cf666d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"syste
m-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-txcfr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-t2frc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:46Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:46 crc kubenswrapper[4730]: I0930 09:49:46.083176 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"823c4c28-801d-421e-b15f-02a17e300753\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://566f950ac85f82874c332e01386898784e3b3e37ee3c422f8b73f6e732cd4541\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f988662b16857e74e7c88eb9e69279e7b5807aa2831cdd6ed9a7cab0a6b2494\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d99f5927ded358669fa88d47fb6a8d8bad2808bfdd07920056227828e478eda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fbe6a18bf72a902960dfc551de099c3956696ca5ac78ff5afc30a93768be323\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd7abb6daba8cf38e3d8cc62ab526acb2fe714e07477eb5d1ecf77e145cf60dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d525f8baccabf7bc3b8067dbbc59d76851878ccc48a9fbe61c637b6fe57e01d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd35a16a93b6c97e4e999d719b5d4dff7fa58ab8
e90128e418c26e8cb0752092\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59538b49ec4037e4bd1ccb035a0263f32db2504037aca2ab484dc9323f80b5d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4fcd3f6fd645e486e741ae4ddea7a8669c849d729860c651131da25638a393f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4fcd3f6fd645e486e741ae4ddea7a8669c849d729860c651131da25638a393f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-c5vmh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:46Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:46 crc kubenswrapper[4730]: I0930 09:49:46.126109 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:46 crc kubenswrapper[4730]: I0930 09:49:46.126155 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:46 crc kubenswrapper[4730]: I0930 09:49:46.126169 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:46 crc kubenswrapper[4730]: I0930 09:49:46.126191 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:46 crc kubenswrapper[4730]: I0930 09:49:46.126203 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:46Z","lastTransitionTime":"2025-09-30T09:49:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:49:46 crc kubenswrapper[4730]: I0930 09:49:46.230491 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:46 crc kubenswrapper[4730]: I0930 09:49:46.230569 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:46 crc kubenswrapper[4730]: I0930 09:49:46.230586 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:46 crc kubenswrapper[4730]: I0930 09:49:46.230625 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:46 crc kubenswrapper[4730]: I0930 09:49:46.230643 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:46Z","lastTransitionTime":"2025-09-30T09:49:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:49:46 crc kubenswrapper[4730]: I0930 09:49:46.332927 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:46 crc kubenswrapper[4730]: I0930 09:49:46.333257 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:46 crc kubenswrapper[4730]: I0930 09:49:46.333392 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:46 crc kubenswrapper[4730]: I0930 09:49:46.333568 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:46 crc kubenswrapper[4730]: I0930 09:49:46.333742 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:46Z","lastTransitionTime":"2025-09-30T09:49:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:49:46 crc kubenswrapper[4730]: I0930 09:49:46.381018 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 09:49:46 crc kubenswrapper[4730]: E0930 09:49:46.381588 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 09:49:46 crc kubenswrapper[4730]: I0930 09:49:46.382445 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 09:49:46 crc kubenswrapper[4730]: E0930 09:49:46.382797 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 09:49:46 crc kubenswrapper[4730]: I0930 09:49:46.382853 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 09:49:46 crc kubenswrapper[4730]: E0930 09:49:46.383337 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 09:49:46 crc kubenswrapper[4730]: I0930 09:49:46.404035 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"14260296-7de2-4c79-8afc-9472b7132a27\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac2d1a0a531df208cd612ed7da358209e05fb5a21f658ca2d1ab62720db094e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9d7ccc34d02e190d685a52bfa797834b585dfe919856b9af92419fa4e6e3c8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-cr
c-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e42db724f492a056ad52bf50f43c9603664c4307a8f0c01d2d83d36f644c134c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5305f42ae8fc9005ec4726813a881f7b86ab167dd060238bbde4c9af5eb2c010\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://009056b5d735b1cd232be92cb1bfae18ea1e4e6f5b06185d424bba38ef57ade7\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T09:49:30Z\\\",\\\"message\\\":\\\"W0930 09:49:19.606005 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0930 09:49:19.606415 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759225759 cert, and key in /tmp/serving-cert-1864340106/serving-signer.crt, /tmp/serving-cert-1864340106/serving-signer.key\\\\nI0930 09:49:19.940795 1 observer_polling.go:159] Starting file observer\\\\nW0930 09:49:19.943973 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0930 09:49:19.944252 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 09:49:19.946386 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1864340106/tls.crt::/tmp/serving-cert-1864340106/tls.key\\\\\\\"\\\\nF0930 09:49:30.331930 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2da24b18038fc740ead0fe99d18f91c87014548941763cf0ef15284ed5eb228\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://674fc8de60d8bca93dc0d54d5e12664780d58b8b4c5eaf314e923c2f0644e11b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://674fc8de60d8bca93dc0d54d5e12664780d58b8b4c5eaf314e923c2f0644e11b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:16Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:46Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:46 crc kubenswrapper[4730]: I0930 09:49:46.419706 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"17c9fe87-51d8-4e94-b0aa-10f3112788c6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a687cd54db44118582a4e1b57fc64241860ae1cfdc202ffd5dccc517967e21f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://95fa75e3d43d0b4c4f3ddcb59fc3bd9fb3a75647e5fedd92463bdfaf6c3db722\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://758b57b21f203bcb20f4e3a3b20c6de8696ba56b4e0eff6a31faaf6879a3e7b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba42c83f0075e3d0f10f24307dd574c42a505351503a8794bbcee9e986ec80e3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:16Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:46Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:46 crc kubenswrapper[4730]: I0930 09:49:46.435987 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1917c40c0ff4b4b6a2389c465a4887bad0db0b5b86a8715e0c790b3e0acfb973\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:46Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:46 crc kubenswrapper[4730]: I0930 09:49:46.436752 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:46 crc kubenswrapper[4730]: I0930 09:49:46.436877 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:46 crc kubenswrapper[4730]: I0930 09:49:46.436957 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:46 crc kubenswrapper[4730]: I0930 09:49:46.437037 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:46 crc kubenswrapper[4730]: I0930 09:49:46.437129 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:46Z","lastTransitionTime":"2025-09-30T09:49:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:49:46 crc kubenswrapper[4730]: I0930 09:49:46.450852 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e32477c6ee071d9e994825da575711cc75f177c998dd5fd170be07551b40e049\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: 
current time 2025-09-30T09:49:46Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:46 crc kubenswrapper[4730]: I0930 09:49:46.466646 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"95bd4436-8399-478d-9552-c9ba5ae8f327\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b7761b460113a85528a11a7475d5245ce3009a0dd413a8deec822c6c0eac07b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9d975\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://defac85c6bf34a3ea262b1ad293516b72b27c3f3e328f3f970694bb978bf90a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9d975\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d4zf9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:46Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:46 crc kubenswrapper[4730]: I0930 09:49:46.480471 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-nw55k" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c0a99445-b60e-4c47-b4ae-00b9983d8a15\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b566e571569572866ddcfc76684ec3f1fdabc0f890aa5fd8588d136b890e88ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qdpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:40Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nw55k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:46Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:46 crc kubenswrapper[4730]: I0930 09:49:46.495962 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s64nf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a7e6b85-ac68-4da9-b7eb-b5a936f639df\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a77da47c8d44b3aae7f4e27fcc4df190549c2d881e0969dbe3b8951103477ba3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6rknv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s64nf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:46Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:46 crc kubenswrapper[4730]: I0930 09:49:46.513106 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:46Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:46 crc kubenswrapper[4730]: I0930 09:49:46.532056 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:46Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:46 crc kubenswrapper[4730]: I0930 09:49:46.539220 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:46 crc kubenswrapper[4730]: I0930 09:49:46.539265 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:46 crc kubenswrapper[4730]: I0930 09:49:46.539280 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:46 crc kubenswrapper[4730]: I0930 09:49:46.539310 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:46 crc kubenswrapper[4730]: I0930 09:49:46.539326 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:46Z","lastTransitionTime":"2025-09-30T09:49:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:49:46 crc kubenswrapper[4730]: I0930 09:49:46.547070 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-t2frc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"98a6f8df-1ac8-4652-8074-90cb180311ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca62256374b9da6cb7b82db90ee6d121b072440c000bc7dfb04e763224cf666d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-txcfr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-t2frc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:46Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:46 crc kubenswrapper[4730]: I0930 09:49:46.574791 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"823c4c28-801d-421e-b15f-02a17e300753\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://566f950ac85f82874c332e01386898784e3b3e37ee3c422f8b73f6e732cd4541\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f988662b16857e74e7c88eb9e69279e7b5807aa2831cdd6ed9a7cab0a6b2494\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mou
ntPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d99f5927ded358669fa88d47fb6a8d8bad2808bfdd07920056227828e478eda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fbe6a18bf72a902960dfc551de099c3956696ca5ac78ff5afc30a93768be323\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd7abb6daba8cf38e3d8cc62ab526acb2fe714e07477eb5d1ecf77e145cf60dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\
\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d525f8baccabf7bc3b8067dbbc59d76851878ccc48a9fbe61c637b6fe57e01d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd35a16a93b6c97e4e999d719b5d4dff7fa58ab8e90128e418c26e8cb0752092\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\
\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59538b49ec4037e4bd1ccb035a0263f32db2504037aca2ab484dc9323f80b5d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4fcd3f6fd645e486e741ae4ddea7a8669c849d729860c651131da25638a393f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4fcd3f6fd645e486e741ae4ddea7a8669c849d729860c651131da25638a393f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-c5vmh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:46Z is 
after 2025-08-24T17:21:41Z" Sep 30 09:49:46 crc kubenswrapper[4730]: I0930 09:49:46.592550 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:46Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:46 crc kubenswrapper[4730]: I0930 09:49:46.608258 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://96ab57c0ead54e1fe3bbf9a7fe3fc8375d1e5ebd29840f01bb2022de2f3bfdc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://373aa4d8084c958728b4a3f16056068fb328a59e55a5f50785647d78fac0e424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:46Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:46 crc kubenswrapper[4730]: I0930 09:49:46.625214 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-p4xvk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9871bed2-69f9-44f1-ab80-f8b4b9241e73\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19b882d14f4eaee6c6ad8cdd30c6e1e63eb837c7bc2dd31974c37ffbc3307186\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84aa64a838e41f5db6010538528a9d8d9221b5fe859e1afec9d9d3c71ef90eb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://84aa64a838e41f5db6010538528a9d8d9221b5fe859e1afec9d9d3c71ef90eb7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://618950d5ff8ec1ef34dc0f43e73a265aefbeb97f69a25226fb2007b4d28249a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://618950d5ff8ec1ef34dc0f43e73a265aefbeb97f69a25226fb2007b4d28249a7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://849eb24b3a7e62a7667c303069953f1391cdebf8f2888dc7d3967667c31c2a3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://849eb24b3a7e62a7667c303069953f1391cdebf8f2888dc7d3967667c31c2a3e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://685162f0c76d93c0aa78dc2c5c0ee794627f665afa60af9d9d0ae3ba8f5f7b59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://685162f0c76d93c0aa78dc2c5c0ee794627f665afa60af9d9d0ae3ba8f5f7b59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c92fbb212b1ad8be488e639f7f88263b3d96ab0320da532d908b3c9df59ef2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c92fbb212b1ad8be488e639f7f88263b3d96ab0320da532d908b3c9df59ef2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://840f2a769ccdfba8933c685d07c325cf84d38bccf1d317c3b664f298aa52d878\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://840f2a769ccdfba8933c685d07c325cf84d38bccf1d317c3b664f298aa52d878\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-p4xvk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:46Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:46 crc kubenswrapper[4730]: I0930 09:49:46.631249 4730 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Sep 30 09:49:46 crc kubenswrapper[4730]: I0930 09:49:46.642187 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:46 crc kubenswrapper[4730]: I0930 09:49:46.642369 4730 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 09:49:46 crc kubenswrapper[4730]: I0930 09:49:46.642447 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 09:49:46 crc kubenswrapper[4730]: I0930 09:49:46.642518 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 09:49:46 crc kubenswrapper[4730]: I0930 09:49:46.642598 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:46Z","lastTransitionTime":"2025-09-30T09:49:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 09:49:46 crc kubenswrapper[4730]: I0930 09:49:46.745532 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 09:49:46 crc kubenswrapper[4730]: I0930 09:49:46.745650 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 09:49:46 crc kubenswrapper[4730]: I0930 09:49:46.745673 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 09:49:46 crc kubenswrapper[4730]: I0930 09:49:46.745699 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 09:49:46 crc kubenswrapper[4730]: I0930 09:49:46.745716 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:46Z","lastTransitionTime":"2025-09-30T09:49:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 09:49:46 crc kubenswrapper[4730]: I0930 09:49:46.848670 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 09:49:46 crc kubenswrapper[4730]: I0930 09:49:46.849057 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 09:49:46 crc kubenswrapper[4730]: I0930 09:49:46.849156 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 09:49:46 crc kubenswrapper[4730]: I0930 09:49:46.849250 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 09:49:46 crc kubenswrapper[4730]: I0930 09:49:46.849399 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:46Z","lastTransitionTime":"2025-09-30T09:49:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 09:49:46 crc kubenswrapper[4730]: I0930 09:49:46.956126 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 09:49:46 crc kubenswrapper[4730]: I0930 09:49:46.956210 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 09:49:46 crc kubenswrapper[4730]: I0930 09:49:46.956229 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 09:49:46 crc kubenswrapper[4730]: I0930 09:49:46.956260 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 09:49:46 crc kubenswrapper[4730]: I0930 09:49:46.956274 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:46Z","lastTransitionTime":"2025-09-30T09:49:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 09:49:47 crc kubenswrapper[4730]: I0930 09:49:47.060125 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 09:49:47 crc kubenswrapper[4730]: I0930 09:49:47.060196 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 09:49:47 crc kubenswrapper[4730]: I0930 09:49:47.060219 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 09:49:47 crc kubenswrapper[4730]: I0930 09:49:47.060251 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 09:49:47 crc kubenswrapper[4730]: I0930 09:49:47.060272 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:47Z","lastTransitionTime":"2025-09-30T09:49:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 09:49:47 crc kubenswrapper[4730]: I0930 09:49:47.162668 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 09:49:47 crc kubenswrapper[4730]: I0930 09:49:47.162712 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 09:49:47 crc kubenswrapper[4730]: I0930 09:49:47.162726 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 09:49:47 crc kubenswrapper[4730]: I0930 09:49:47.162745 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 09:49:47 crc kubenswrapper[4730]: I0930 09:49:47.162758 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:47Z","lastTransitionTime":"2025-09-30T09:49:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 09:49:47 crc kubenswrapper[4730]: I0930 09:49:47.265867 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 09:49:47 crc kubenswrapper[4730]: I0930 09:49:47.265933 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 09:49:47 crc kubenswrapper[4730]: I0930 09:49:47.265966 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 09:49:47 crc kubenswrapper[4730]: I0930 09:49:47.265991 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 09:49:47 crc kubenswrapper[4730]: I0930 09:49:47.266010 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:47Z","lastTransitionTime":"2025-09-30T09:49:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 09:49:47 crc kubenswrapper[4730]: I0930 09:49:47.369065 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 09:49:47 crc kubenswrapper[4730]: I0930 09:49:47.369105 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 09:49:47 crc kubenswrapper[4730]: I0930 09:49:47.369115 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 09:49:47 crc kubenswrapper[4730]: I0930 09:49:47.369132 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 09:49:47 crc kubenswrapper[4730]: I0930 09:49:47.369142 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:47Z","lastTransitionTime":"2025-09-30T09:49:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 09:49:47 crc kubenswrapper[4730]: I0930 09:49:47.471808 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 09:49:47 crc kubenswrapper[4730]: I0930 09:49:47.471855 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 09:49:47 crc kubenswrapper[4730]: I0930 09:49:47.471867 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 09:49:47 crc kubenswrapper[4730]: I0930 09:49:47.471885 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 09:49:47 crc kubenswrapper[4730]: I0930 09:49:47.471898 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:47Z","lastTransitionTime":"2025-09-30T09:49:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 09:49:47 crc kubenswrapper[4730]: I0930 09:49:47.574806 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 09:49:47 crc kubenswrapper[4730]: I0930 09:49:47.574861 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 09:49:47 crc kubenswrapper[4730]: I0930 09:49:47.574871 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 09:49:47 crc kubenswrapper[4730]: I0930 09:49:47.574888 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 09:49:47 crc kubenswrapper[4730]: I0930 09:49:47.574901 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:47Z","lastTransitionTime":"2025-09-30T09:49:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 09:49:47 crc kubenswrapper[4730]: I0930 09:49:47.635556 4730 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Sep 30 09:49:47 crc kubenswrapper[4730]: I0930 09:49:47.677310 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 09:49:47 crc kubenswrapper[4730]: I0930 09:49:47.677374 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 09:49:47 crc kubenswrapper[4730]: I0930 09:49:47.677387 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 09:49:47 crc kubenswrapper[4730]: I0930 09:49:47.677413 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 09:49:47 crc kubenswrapper[4730]: I0930 09:49:47.677426 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:47Z","lastTransitionTime":"2025-09-30T09:49:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 09:49:47 crc kubenswrapper[4730]: I0930 09:49:47.779815 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 09:49:47 crc kubenswrapper[4730]: I0930 09:49:47.779866 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 09:49:47 crc kubenswrapper[4730]: I0930 09:49:47.779877 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 09:49:47 crc kubenswrapper[4730]: I0930 09:49:47.779894 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 09:49:47 crc kubenswrapper[4730]: I0930 09:49:47.779903 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:47Z","lastTransitionTime":"2025-09-30T09:49:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 09:49:47 crc kubenswrapper[4730]: I0930 09:49:47.882422 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 09:49:47 crc kubenswrapper[4730]: I0930 09:49:47.882474 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 09:49:47 crc kubenswrapper[4730]: I0930 09:49:47.882484 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 09:49:47 crc kubenswrapper[4730]: I0930 09:49:47.882504 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 09:49:47 crc kubenswrapper[4730]: I0930 09:49:47.882516 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:47Z","lastTransitionTime":"2025-09-30T09:49:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 09:49:47 crc kubenswrapper[4730]: I0930 09:49:47.984445 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 09:49:47 crc kubenswrapper[4730]: I0930 09:49:47.984499 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 09:49:47 crc kubenswrapper[4730]: I0930 09:49:47.984513 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 09:49:47 crc kubenswrapper[4730]: I0930 09:49:47.984533 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 09:49:47 crc kubenswrapper[4730]: I0930 09:49:47.984547 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:47Z","lastTransitionTime":"2025-09-30T09:49:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:49:48 crc kubenswrapper[4730]: I0930 09:49:48.087013 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:48 crc kubenswrapper[4730]: I0930 09:49:48.087080 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:48 crc kubenswrapper[4730]: I0930 09:49:48.087093 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:48 crc kubenswrapper[4730]: I0930 09:49:48.087117 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:48 crc kubenswrapper[4730]: I0930 09:49:48.087133 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:48Z","lastTransitionTime":"2025-09-30T09:49:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:49:48 crc kubenswrapper[4730]: I0930 09:49:48.190100 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:48 crc kubenswrapper[4730]: I0930 09:49:48.190150 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:48 crc kubenswrapper[4730]: I0930 09:49:48.190166 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:48 crc kubenswrapper[4730]: I0930 09:49:48.190185 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:48 crc kubenswrapper[4730]: I0930 09:49:48.190200 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:48Z","lastTransitionTime":"2025-09-30T09:49:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:49:48 crc kubenswrapper[4730]: I0930 09:49:48.293190 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:48 crc kubenswrapper[4730]: I0930 09:49:48.293245 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:48 crc kubenswrapper[4730]: I0930 09:49:48.293259 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:48 crc kubenswrapper[4730]: I0930 09:49:48.293280 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:48 crc kubenswrapper[4730]: I0930 09:49:48.293290 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:48Z","lastTransitionTime":"2025-09-30T09:49:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:49:48 crc kubenswrapper[4730]: I0930 09:49:48.380711 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 09:49:48 crc kubenswrapper[4730]: I0930 09:49:48.380748 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 09:49:48 crc kubenswrapper[4730]: E0930 09:49:48.381396 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 09:49:48 crc kubenswrapper[4730]: I0930 09:49:48.380766 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 09:49:48 crc kubenswrapper[4730]: E0930 09:49:48.381505 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 09:49:48 crc kubenswrapper[4730]: E0930 09:49:48.381587 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 09:49:48 crc kubenswrapper[4730]: I0930 09:49:48.397087 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:48 crc kubenswrapper[4730]: I0930 09:49:48.397127 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:48 crc kubenswrapper[4730]: I0930 09:49:48.397138 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:48 crc kubenswrapper[4730]: I0930 09:49:48.397156 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:48 crc kubenswrapper[4730]: I0930 09:49:48.397168 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:48Z","lastTransitionTime":"2025-09-30T09:49:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:49:48 crc kubenswrapper[4730]: I0930 09:49:48.499966 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:48 crc kubenswrapper[4730]: I0930 09:49:48.500021 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:48 crc kubenswrapper[4730]: I0930 09:49:48.500033 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:48 crc kubenswrapper[4730]: I0930 09:49:48.500055 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:48 crc kubenswrapper[4730]: I0930 09:49:48.500069 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:48Z","lastTransitionTime":"2025-09-30T09:49:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:49:48 crc kubenswrapper[4730]: I0930 09:49:48.602665 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:48 crc kubenswrapper[4730]: I0930 09:49:48.602719 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:48 crc kubenswrapper[4730]: I0930 09:49:48.602729 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:48 crc kubenswrapper[4730]: I0930 09:49:48.602747 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:48 crc kubenswrapper[4730]: I0930 09:49:48.602760 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:48Z","lastTransitionTime":"2025-09-30T09:49:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:49:48 crc kubenswrapper[4730]: I0930 09:49:48.641381 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-c5vmh_823c4c28-801d-421e-b15f-02a17e300753/ovnkube-controller/0.log" Sep 30 09:49:48 crc kubenswrapper[4730]: I0930 09:49:48.644900 4730 generic.go:334] "Generic (PLEG): container finished" podID="823c4c28-801d-421e-b15f-02a17e300753" containerID="dd35a16a93b6c97e4e999d719b5d4dff7fa58ab8e90128e418c26e8cb0752092" exitCode=1 Sep 30 09:49:48 crc kubenswrapper[4730]: I0930 09:49:48.644958 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" event={"ID":"823c4c28-801d-421e-b15f-02a17e300753","Type":"ContainerDied","Data":"dd35a16a93b6c97e4e999d719b5d4dff7fa58ab8e90128e418c26e8cb0752092"} Sep 30 09:49:48 crc kubenswrapper[4730]: I0930 09:49:48.646226 4730 scope.go:117] "RemoveContainer" containerID="dd35a16a93b6c97e4e999d719b5d4dff7fa58ab8e90128e418c26e8cb0752092" Sep 30 09:49:48 crc kubenswrapper[4730]: I0930 09:49:48.660783 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:48Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:48 crc kubenswrapper[4730]: I0930 09:49:48.674248 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:48Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:48 crc kubenswrapper[4730]: I0930 09:49:48.690075 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-t2frc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"98a6f8df-1ac8-4652-8074-90cb180311ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca62256374b9da6cb7b82db90ee6d121b072440c000bc7dfb04e763224cf666d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\
"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-txcfr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-t2frc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:48Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:48 crc kubenswrapper[4730]: I0930 09:49:48.705644 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:48 crc kubenswrapper[4730]: I0930 09:49:48.705727 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:48 crc kubenswrapper[4730]: I0930 09:49:48.705743 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:48 crc kubenswrapper[4730]: I0930 09:49:48.705766 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:48 crc kubenswrapper[4730]: I0930 09:49:48.705778 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:48Z","lastTransitionTime":"2025-09-30T09:49:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:49:48 crc kubenswrapper[4730]: I0930 09:49:48.718226 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"823c4c28-801d-421e-b15f-02a17e300753\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://566f950ac85f82874c332e01386898784e3b3e37ee3c422f8b73f6e732cd4541\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f988662b16857e74e7c88eb9e69279e7b5807aa2831cdd6ed9a7cab0a6b2494\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://7d99f5927ded358669fa88d47fb6a8d8bad2808bfdd07920056227828e478eda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fbe6a18bf72a902960dfc551de099c3956696ca5ac78ff5afc30a93768be323\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd7abb6daba8cf38e3d8cc62ab526acb2fe714e07477eb5d1ecf77e145cf60dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d525f8baccabf7bc3b8067dbbc59d76851878ccc48a9fbe61c637b6fe57e01d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd35a16a93b6c97e4e999d719b5d4dff7fa58ab8e90128e418c26e8cb0752092\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dd35a16a93b6c97e4e999d719b5d4dff7fa58ab8e90128e418c26e8cb0752092\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T09:49:48Z\\\",\\\"message\\\":\\\"09:49:48.535304 5985 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0930 09:49:48.535987 5985 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0930 09:49:48.536052 5985 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0930 09:49:48.536097 5985 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0930 09:49:48.536127 5985 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0930 09:49:48.536223 5985 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0930 09:49:48.536244 5985 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0930 09:49:48.536250 5985 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0930 09:49:48.536298 5985 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0930 09:49:48.536181 5985 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0930 09:49:48.536314 5985 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0930 09:49:48.536327 5985 handler.go:208] Removed *v1.Node event handler 2\\\\nI0930 09:49:48.536346 5985 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0930 09:49:48.536395 5985 handler.go:208] Removed *v1.Node event handler 7\\\\nI0930 09:49:48.536418 5985 factory.go:656] Stopping watch factory\\\\nI0930 09:49:48.536437 5985 ovnkube.go:599] Stopped ovnkube\\\\nI0930 
0\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59538b49ec4037e4bd1ccb035a0263f32db2504037aca2ab484dc9323f80b5d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4fcd3f6fd645e486e741ae4ddea7a8669c849d729860c651131da25638a393f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d209
9482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4fcd3f6fd645e486e741ae4ddea7a8669c849d729860c651131da25638a393f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-c5vmh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:48Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:48 crc kubenswrapper[4730]: I0930 09:49:48.735579 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:48Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:48 crc kubenswrapper[4730]: I0930 09:49:48.750572 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://96ab57c0ead54e1fe3bbf9a7fe3fc8375d1e5ebd29840f01bb2022de2f3bfdc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://373aa4d8084c958728b4a3f16056068fb328a59e55a5f50785647d78fac0e424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:48Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:48 crc kubenswrapper[4730]: I0930 09:49:48.768544 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-p4xvk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9871bed2-69f9-44f1-ab80-f8b4b9241e73\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19b882d14f4eaee6c6ad8cdd30c6e1e63eb837c7bc2dd31974c37ffbc3307186\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84aa64a838e41f5db6010538528a9d8d9221b5fe859e1afec9d9d3c71ef90eb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://84aa64a838e41f5db6010538528a9d8d9221b5fe859e1afec9d9d3c71ef90eb7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"start
edAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://618950d5ff8ec1ef34dc0f43e73a265aefbeb97f69a25226fb2007b4d28249a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://618950d5ff8ec1ef34dc0f43e73a265aefbeb97f69a25226fb2007b4d28249a7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://849eb24b3a7e62a7667c303069953f1391cdebf8f2888dc7d3967667c31c2a3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://849eb24b3a7e62a7667c303069953f1391cdebf8f2888dc7d3967667c31c2a3e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://685162f0c76d93c0aa78dc2c5c0ee794627f665afa60af9d9d0ae3ba8f5f7b59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\"
:\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://685162f0c76d93c0aa78dc2c5c0ee794627f665afa60af9d9d0ae3ba8f5f7b59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c92fbb212b1ad8be488e639f7f88263b3d96ab0320da532d908b3c9df59ef2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c92fbb212b1ad8be488e639f7f88263b3d96ab0320da532d908b3c9df59ef2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://840f2a769ccdfba8933c685d07c325cf84d38bccf1d317c3b664f298aa52d878\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://840f2a769ccdfba8933c685d07c325cf84d38bccf1d317c3b664f298aa52d878\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.1
68.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-p4xvk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:48Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:48 crc kubenswrapper[4730]: I0930 09:49:48.782199 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-nw55k" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c0a99445-b60e-4c47-b4ae-00b9983d8a15\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b566e571569572866ddcfc76684ec3f1fdabc0f890aa5fd8588d136b890e88ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qdpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:40Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nw55k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:48Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:48 crc kubenswrapper[4730]: I0930 09:49:48.800570 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"14260296-7de2-4c79-8afc-9472b7132a27\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac2d1a0a531df208cd612ed7da358209e05fb5a21f658ca2d1ab62720db094e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9d7ccc34d02e190d685a52bfa797834b585dfe919856b9af92419fa4e6e3c8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e42db724f492a056ad52bf50f43c9603664c4307a8f0c01d2d83d36f644c134c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5305f42ae8fc9005ec4726813a881f7b86ab167dd060238bbde4c9af5eb2c010\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://009056b5d735b1cd232be92cb1bfae18ea1e4e6f5b06185d424bba38ef57ade7\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T09:49:30Z\\\",\\\"message\\\":\\\"W0930 09:49:19.606005 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0930 09:49:19.606415 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759225759 cert, and key in /tmp/serving-cert-1864340106/serving-signer.crt, /tmp/serving-cert-1864340106/serving-signer.key\\\\nI0930 09:49:19.940795 1 observer_polling.go:159] Starting file observer\\\\nW0930 09:49:19.943973 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0930 09:49:19.944252 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 09:49:19.946386 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1864340106/tls.crt::/tmp/serving-cert-1864340106/tls.key\\\\\\\"\\\\nF0930 09:49:30.331930 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2da24b18038fc740ead0fe99d18f91c87014548941763cf0ef15284ed5eb228\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://674fc8de60d8bca93dc0d54d5e12664780d58b8b4c5eaf314e923c2f0644e11b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminat
ed\\\":{\\\"containerID\\\":\\\"cri-o://674fc8de60d8bca93dc0d54d5e12664780d58b8b4c5eaf314e923c2f0644e11b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:16Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:48Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:48 crc kubenswrapper[4730]: I0930 09:49:48.808792 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:48 crc kubenswrapper[4730]: I0930 09:49:48.808835 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:48 crc kubenswrapper[4730]: I0930 09:49:48.808848 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:48 crc kubenswrapper[4730]: I0930 09:49:48.808866 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:48 crc kubenswrapper[4730]: I0930 09:49:48.808877 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:48Z","lastTransitionTime":"2025-09-30T09:49:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:49:48 crc kubenswrapper[4730]: I0930 09:49:48.820212 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"17c9fe87-51d8-4e94-b0aa-10f3112788c6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a687cd54db44118582a4e1b57fc64241860ae1cfdc202ffd5dccc517967e21f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://95fa75e3d43d0b4c4f3ddcb59fc3bd9fb3a75647e5fedd92463bdfaf6c3db722\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://758b57b21f203bcb20f4e3a3b20c6de8696ba56b4e0eff6a31faaf6879a3e7b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba42c83f0075e3d0f10f24307dd574c42a505351503a8794bbcee9e986ec80e3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:16Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:48Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:48 crc kubenswrapper[4730]: I0930 09:49:48.837886 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1917c40c0ff4b4b6a2389c465a4887bad0db0b5b86a8715e0c790b3e0acfb973\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for 
pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:48Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:48 crc kubenswrapper[4730]: I0930 09:49:48.852492 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e32477c6ee071d9e994825da575711cc75f177c998dd5fd170be07551b40e049\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:48Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:48 crc kubenswrapper[4730]: I0930 09:49:48.865866 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"95bd4436-8399-478d-9552-c9ba5ae8f327\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b7761b460113a85528a11a7475d5245ce3009a0dd413a8deec822c6c0eac07b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9d975\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://defac85c6bf34a3ea262b1ad293516b72b27c3f3e328f3f970694bb978bf90a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9d975\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d4zf9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:48Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:48 crc kubenswrapper[4730]: I0930 09:49:48.878408 4730 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-dns/node-resolver-s64nf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a7e6b85-ac68-4da9-b7eb-b5a936f639df\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a77da47c8d44b3aae7f4e27fcc4df190549c2d881e0969dbe3b8951103477ba3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6rknv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s64nf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:48Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:48 crc kubenswrapper[4730]: I0930 09:49:48.911788 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:48 crc kubenswrapper[4730]: I0930 09:49:48.911837 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:48 crc kubenswrapper[4730]: I0930 09:49:48.911851 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:48 crc kubenswrapper[4730]: I0930 09:49:48.911869 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:48 crc kubenswrapper[4730]: I0930 09:49:48.911881 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:48Z","lastTransitionTime":"2025-09-30T09:49:48Z","reason":"KubeletNotReady","message":"container runtime network not 
ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:49:49 crc kubenswrapper[4730]: I0930 09:49:49.014254 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:49 crc kubenswrapper[4730]: I0930 09:49:49.014307 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:49 crc kubenswrapper[4730]: I0930 09:49:49.014317 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:49 crc kubenswrapper[4730]: I0930 09:49:49.014334 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:49 crc kubenswrapper[4730]: I0930 09:49:49.014345 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:49Z","lastTransitionTime":"2025-09-30T09:49:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:49:49 crc kubenswrapper[4730]: I0930 09:49:49.117368 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:49 crc kubenswrapper[4730]: I0930 09:49:49.117434 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:49 crc kubenswrapper[4730]: I0930 09:49:49.117448 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:49 crc kubenswrapper[4730]: I0930 09:49:49.117477 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:49 crc kubenswrapper[4730]: I0930 09:49:49.117491 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:49Z","lastTransitionTime":"2025-09-30T09:49:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:49:49 crc kubenswrapper[4730]: I0930 09:49:49.220187 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:49 crc kubenswrapper[4730]: I0930 09:49:49.220252 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:49 crc kubenswrapper[4730]: I0930 09:49:49.220268 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:49 crc kubenswrapper[4730]: I0930 09:49:49.220293 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:49 crc kubenswrapper[4730]: I0930 09:49:49.220312 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:49Z","lastTransitionTime":"2025-09-30T09:49:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:49:49 crc kubenswrapper[4730]: I0930 09:49:49.324307 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:49 crc kubenswrapper[4730]: I0930 09:49:49.324370 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:49 crc kubenswrapper[4730]: I0930 09:49:49.324386 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:49 crc kubenswrapper[4730]: I0930 09:49:49.324410 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:49 crc kubenswrapper[4730]: I0930 09:49:49.324426 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:49Z","lastTransitionTime":"2025-09-30T09:49:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:49:49 crc kubenswrapper[4730]: I0930 09:49:49.427653 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:49 crc kubenswrapper[4730]: I0930 09:49:49.427713 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:49 crc kubenswrapper[4730]: I0930 09:49:49.427725 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:49 crc kubenswrapper[4730]: I0930 09:49:49.427746 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:49 crc kubenswrapper[4730]: I0930 09:49:49.427762 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:49Z","lastTransitionTime":"2025-09-30T09:49:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:49:49 crc kubenswrapper[4730]: I0930 09:49:49.530717 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:49 crc kubenswrapper[4730]: I0930 09:49:49.530769 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:49 crc kubenswrapper[4730]: I0930 09:49:49.530779 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:49 crc kubenswrapper[4730]: I0930 09:49:49.530797 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:49 crc kubenswrapper[4730]: I0930 09:49:49.530808 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:49Z","lastTransitionTime":"2025-09-30T09:49:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:49:49 crc kubenswrapper[4730]: I0930 09:49:49.633541 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:49 crc kubenswrapper[4730]: I0930 09:49:49.633580 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:49 crc kubenswrapper[4730]: I0930 09:49:49.633592 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:49 crc kubenswrapper[4730]: I0930 09:49:49.633628 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:49 crc kubenswrapper[4730]: I0930 09:49:49.633643 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:49Z","lastTransitionTime":"2025-09-30T09:49:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:49:49 crc kubenswrapper[4730]: I0930 09:49:49.649831 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-c5vmh_823c4c28-801d-421e-b15f-02a17e300753/ovnkube-controller/0.log" Sep 30 09:49:49 crc kubenswrapper[4730]: I0930 09:49:49.652330 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" event={"ID":"823c4c28-801d-421e-b15f-02a17e300753","Type":"ContainerStarted","Data":"1ad89aefc193452c8800d8680d37a4b800155f7d26f323088ad7b7e636adaa88"} Sep 30 09:49:49 crc kubenswrapper[4730]: I0930 09:49:49.652495 4730 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Sep 30 09:49:49 crc kubenswrapper[4730]: I0930 09:49:49.667268 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:49Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:49 crc kubenswrapper[4730]: I0930 09:49:49.681927 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://96ab57c0ead54e1fe3bbf9a7fe3fc8375d1e5ebd29840f01bb2022de2f3bfdc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://373aa4d8084c958728b4a3f16056068fb328a59e55a5f50785647d78fac0e424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:49Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:49 crc kubenswrapper[4730]: I0930 09:49:49.697381 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-p4xvk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9871bed2-69f9-44f1-ab80-f8b4b9241e73\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19b882d14f4eaee6c6ad8cdd30c6e1e63eb837c7bc2dd31974c37ffbc3307186\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84aa64a838e41f5db6010538528a9d8d9221b5fe859e1afec9d9d3c71ef90eb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://84aa64a838e41f5db6010538528a9d8d9221b5fe859e1afec9d9d3c71ef90eb7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"start
edAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://618950d5ff8ec1ef34dc0f43e73a265aefbeb97f69a25226fb2007b4d28249a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://618950d5ff8ec1ef34dc0f43e73a265aefbeb97f69a25226fb2007b4d28249a7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://849eb24b3a7e62a7667c303069953f1391cdebf8f2888dc7d3967667c31c2a3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://849eb24b3a7e62a7667c303069953f1391cdebf8f2888dc7d3967667c31c2a3e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://685162f0c76d93c0aa78dc2c5c0ee794627f665afa60af9d9d0ae3ba8f5f7b59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\"
:\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://685162f0c76d93c0aa78dc2c5c0ee794627f665afa60af9d9d0ae3ba8f5f7b59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c92fbb212b1ad8be488e639f7f88263b3d96ab0320da532d908b3c9df59ef2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c92fbb212b1ad8be488e639f7f88263b3d96ab0320da532d908b3c9df59ef2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://840f2a769ccdfba8933c685d07c325cf84d38bccf1d317c3b664f298aa52d878\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://840f2a769ccdfba8933c685d07c325cf84d38bccf1d317c3b664f298aa52d878\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.1
68.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-p4xvk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:49Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:49 crc kubenswrapper[4730]: I0930 09:49:49.715125 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"14260296-7de2-4c79-8afc-9472b7132a27\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac2d1a0a531df208cd612ed7da358209e05fb5a21f658ca2d1ab62720db094e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9d7ccc34d02e190d685a52bfa797834b585dfe919856b9af92419fa4e6e3c8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e42db724f492a056ad52bf50f43c9603664c4307a8f0c01d2d83d36f644c134c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-clust
er-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5305f42ae8fc9005ec4726813a881f7b86ab167dd060238bbde4c9af5eb2c010\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://009056b5d735b1cd232be92cb1bfae18ea1e4e6f5b06185d424bba38ef57ade7\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T09:49:30Z\\\",\\\"message\\\":\\\"W0930 09:49:19.606005 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0930 09:49:19.606415 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759225759 cert, and key in /tmp/serving-cert-1864340106/serving-signer.crt, /tmp/serving-cert-1864340106/serving-signer.key\\\\nI0930 09:49:19.940795 1 observer_polling.go:159] Starting file observer\\\\nW0930 09:49:19.943973 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0930 09:49:19.944252 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 09:49:19.946386 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1864340106/tls.crt::/tmp/serving-cert-1864340106/tls.key\\\\\\\"\\\\nF0930 09:49:30.331930 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2da24b18038fc740ead0fe99d18f91c87014548941763cf0ef15284ed5eb228\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://674fc8de60d8bca93dc0d54d5e12664780d58b8b4c5eaf314e923c2f0644e11b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://674fc8de60d8bca93dc0d54d5e12664780d58b8b4c5eaf314e923c2f0644e11b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:16Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:49Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:49 crc kubenswrapper[4730]: I0930 09:49:49.728548 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"17c9fe87-51d8-4e94-b0aa-10f3112788c6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a687cd54db44118582a4e1b57fc64241860ae1cfdc202ffd5dccc517967e21f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://95fa75e3d43d0b4c4f3ddcb59fc3bd9fb3a75647e5fedd92463bdfaf6c3db722\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://758b57b21f203bcb20f4e3a3b20c6de8696ba56b4e0eff6a31faaf6879a3e7b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba42c83f0075e3d0f10f24307dd574c42a505351503a8794bbcee9e986ec80e3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:16Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:49Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:49 crc kubenswrapper[4730]: I0930 09:49:49.737648 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:49 crc kubenswrapper[4730]: I0930 09:49:49.737686 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:49 crc kubenswrapper[4730]: I0930 09:49:49.737697 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:49 crc kubenswrapper[4730]: I0930 09:49:49.737713 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:49 crc kubenswrapper[4730]: I0930 09:49:49.737724 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:49Z","lastTransitionTime":"2025-09-30T09:49:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:49:49 crc kubenswrapper[4730]: I0930 09:49:49.741786 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1917c40c0ff4b4b6a2389c465a4887bad0db0b5b86a8715e0c790b3e0acfb973\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:49Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:49 crc kubenswrapper[4730]: I0930 09:49:49.756703 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e32477c6ee071d9e994825da575711cc75f177c998dd5fd170be07551b40e049\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:49Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:49 crc kubenswrapper[4730]: I0930 09:49:49.769853 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"95bd4436-8399-478d-9552-c9ba5ae8f327\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b7761b460113a85528a11a7475d5245ce3009a0dd413a8deec822c6c0eac07b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9d975\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://defac85c6bf34a3ea262b1ad293516b72b27c3f3e328f3f970694bb978bf90a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9d975\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d4zf9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:49Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:49 crc kubenswrapper[4730]: I0930 09:49:49.784354 4730 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-nw55k" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c0a99445-b60e-4c47-b4ae-00b9983d8a15\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b566e571569572866ddcfc76684ec3f1fdabc0f890aa5fd8588d136b890e88ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qdpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:40Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nw55k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:49Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:49 crc kubenswrapper[4730]: I0930 09:49:49.799556 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s64nf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a7e6b85-ac68-4da9-b7eb-b5a936f639df\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a77da47c8d44b3aae7f4e27fcc4df190549c2d881e0969dbe3b8951103477ba3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6rknv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s64nf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:49Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:49 crc kubenswrapper[4730]: I0930 09:49:49.816646 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:49Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:49 crc kubenswrapper[4730]: I0930 09:49:49.833024 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:49Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:49 crc kubenswrapper[4730]: I0930 09:49:49.840133 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:49 crc kubenswrapper[4730]: I0930 09:49:49.840179 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:49 crc kubenswrapper[4730]: I0930 09:49:49.840190 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:49 crc kubenswrapper[4730]: I0930 09:49:49.840209 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:49 crc kubenswrapper[4730]: I0930 09:49:49.840222 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:49Z","lastTransitionTime":"2025-09-30T09:49:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:49:49 crc kubenswrapper[4730]: I0930 09:49:49.850728 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-t2frc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"98a6f8df-1ac8-4652-8074-90cb180311ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca62256374b9da6cb7b82db90ee6d121b072440c000bc7dfb04e763224cf666d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-txcfr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-t2frc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:49Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:49 crc kubenswrapper[4730]: I0930 09:49:49.871116 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"823c4c28-801d-421e-b15f-02a17e300753\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://566f950ac85f82874c332e01386898784e3b3e37ee3c422f8b73f6e732cd4541\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f988662b16857e74e7c88eb9e69279e7b5807aa2831cdd6ed9a7cab0a6b2494\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mou
ntPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d99f5927ded358669fa88d47fb6a8d8bad2808bfdd07920056227828e478eda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fbe6a18bf72a902960dfc551de099c3956696ca5ac78ff5afc30a93768be323\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd7abb6daba8cf38e3d8cc62ab526acb2fe714e07477eb5d1ecf77e145cf60dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\
\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d525f8baccabf7bc3b8067dbbc59d76851878ccc48a9fbe61c637b6fe57e01d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ad89aefc193452c8800d8680d37a4b800155f7d26f323088ad7b7e636adaa88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dd35a16a93b6c97e4e999d719b5d4dff7fa58ab8e90128e418c26e8cb0752092\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T09:49:48Z\\\",\\\"message\\\":\\\"09:49:48.535304 5985 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0930 09:49:48.535987 5985 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0930 09:49:48.536052 5985 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0930 09:49:48.536097 5985 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0930 09:49:48.536127 5985 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0930 09:49:48.536223 5985 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0930 09:49:48.536244 5985 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0930 09:49:48.536250 5985 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0930 09:49:48.536298 5985 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0930 09:49:48.536181 5985 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0930 09:49:48.536314 5985 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0930 09:49:48.536327 5985 handler.go:208] Removed *v1.Node event handler 2\\\\nI0930 09:49:48.536346 5985 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0930 09:49:48.536395 5985 handler.go:208] Removed *v1.Node event handler 7\\\\nI0930 09:49:48.536418 5985 factory.go:656] Stopping 
watch factory\\\\nI0930 09:49:48.536437 5985 ovnkube.go:599] Stopped ovnkube\\\\nI0930 0\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:44Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59538b49ec4037e4bd1ccb035a0263f32db2504037aca2ab484dc9323f80b5d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hos
tIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4fcd3f6fd645e486e741ae4ddea7a8669c849d729860c651131da25638a393f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4fcd3f6fd645e486e741ae4ddea7a8669c849d729860c651131da25638a393f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-c5vmh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:49Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:49 crc kubenswrapper[4730]: I0930 09:49:49.943534 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:49 crc kubenswrapper[4730]: I0930 09:49:49.943600 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:49 crc kubenswrapper[4730]: I0930 09:49:49.943643 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:49 crc kubenswrapper[4730]: I0930 09:49:49.943670 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:49 crc kubenswrapper[4730]: I0930 09:49:49.943687 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:49Z","lastTransitionTime":"2025-09-30T09:49:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Sep 30 09:49:50 crc kubenswrapper[4730]: I0930 09:49:50.046089 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 09:49:50 crc kubenswrapper[4730]: I0930 09:49:50.046128 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 09:49:50 crc kubenswrapper[4730]: I0930 09:49:50.046139 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 09:49:50 crc kubenswrapper[4730]: I0930 09:49:50.046154 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 09:49:50 crc kubenswrapper[4730]: I0930 09:49:50.046164 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:50Z","lastTransitionTime":"2025-09-30T09:49:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 09:49:50 crc kubenswrapper[4730]: I0930 09:49:50.149217 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 09:49:50 crc kubenswrapper[4730]: I0930 09:49:50.149266 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 09:49:50 crc kubenswrapper[4730]: I0930 09:49:50.149277 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 09:49:50 crc kubenswrapper[4730]: I0930 09:49:50.149293 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 09:49:50 crc kubenswrapper[4730]: I0930 09:49:50.149304 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:50Z","lastTransitionTime":"2025-09-30T09:49:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 09:49:50 crc kubenswrapper[4730]: I0930 09:49:50.252055 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 09:49:50 crc kubenswrapper[4730]: I0930 09:49:50.252143 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 09:49:50 crc kubenswrapper[4730]: I0930 09:49:50.252164 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 09:49:50 crc kubenswrapper[4730]: I0930 09:49:50.252190 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 09:49:50 crc kubenswrapper[4730]: I0930 09:49:50.252206 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:50Z","lastTransitionTime":"2025-09-30T09:49:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 09:49:50 crc kubenswrapper[4730]: I0930 09:49:50.356799 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 09:49:50 crc kubenswrapper[4730]: I0930 09:49:50.356870 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 09:49:50 crc kubenswrapper[4730]: I0930 09:49:50.356886 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 09:49:50 crc kubenswrapper[4730]: I0930 09:49:50.356909 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 09:49:50 crc kubenswrapper[4730]: I0930 09:49:50.356923 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:50Z","lastTransitionTime":"2025-09-30T09:49:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 09:49:50 crc kubenswrapper[4730]: I0930 09:49:50.380294 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 30 09:49:50 crc kubenswrapper[4730]: I0930 09:49:50.380373 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 30 09:49:50 crc kubenswrapper[4730]: E0930 09:49:50.380448 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 30 09:49:50 crc kubenswrapper[4730]: I0930 09:49:50.380373 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 30 09:49:50 crc kubenswrapper[4730]: E0930 09:49:50.380540 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 30 09:49:50 crc kubenswrapper[4730]: E0930 09:49:50.380656 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 30 09:49:50 crc kubenswrapper[4730]: I0930 09:49:50.459836 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 09:49:50 crc kubenswrapper[4730]: I0930 09:49:50.459885 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 09:49:50 crc kubenswrapper[4730]: I0930 09:49:50.459897 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 09:49:50 crc kubenswrapper[4730]: I0930 09:49:50.459914 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 09:49:50 crc kubenswrapper[4730]: I0930 09:49:50.459928 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:50Z","lastTransitionTime":"2025-09-30T09:49:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 09:49:50 crc kubenswrapper[4730]: I0930 09:49:50.563013 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 09:49:50 crc kubenswrapper[4730]: I0930 09:49:50.563069 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 09:49:50 crc kubenswrapper[4730]: I0930 09:49:50.563082 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 09:49:50 crc kubenswrapper[4730]: I0930 09:49:50.563104 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 09:49:50 crc kubenswrapper[4730]: I0930 09:49:50.563119 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:50Z","lastTransitionTime":"2025-09-30T09:49:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 09:49:50 crc kubenswrapper[4730]: I0930 09:49:50.658927 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-c5vmh_823c4c28-801d-421e-b15f-02a17e300753/ovnkube-controller/1.log"
Sep 30 09:49:50 crc kubenswrapper[4730]: I0930 09:49:50.659564 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-c5vmh_823c4c28-801d-421e-b15f-02a17e300753/ovnkube-controller/0.log"
Sep 30 09:49:50 crc kubenswrapper[4730]: I0930 09:49:50.662624 4730 generic.go:334] "Generic (PLEG): container finished" podID="823c4c28-801d-421e-b15f-02a17e300753" containerID="1ad89aefc193452c8800d8680d37a4b800155f7d26f323088ad7b7e636adaa88" exitCode=1
Sep 30 09:49:50 crc kubenswrapper[4730]: I0930 09:49:50.662672 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" event={"ID":"823c4c28-801d-421e-b15f-02a17e300753","Type":"ContainerDied","Data":"1ad89aefc193452c8800d8680d37a4b800155f7d26f323088ad7b7e636adaa88"}
Sep 30 09:49:50 crc kubenswrapper[4730]: I0930 09:49:50.662712 4730 scope.go:117] "RemoveContainer" containerID="dd35a16a93b6c97e4e999d719b5d4dff7fa58ab8e90128e418c26e8cb0752092"
Sep 30 09:49:50 crc kubenswrapper[4730]: I0930 09:49:50.663520 4730 scope.go:117] "RemoveContainer" containerID="1ad89aefc193452c8800d8680d37a4b800155f7d26f323088ad7b7e636adaa88"
Sep 30 09:49:50 crc kubenswrapper[4730]: E0930 09:49:50.663713 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-c5vmh_openshift-ovn-kubernetes(823c4c28-801d-421e-b15f-02a17e300753)\"" pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" podUID="823c4c28-801d-421e-b15f-02a17e300753"
Sep 30 09:49:50 crc kubenswrapper[4730]: I0930 09:49:50.666873 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 09:49:50 crc kubenswrapper[4730]: I0930 09:49:50.666929 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 09:49:50 crc kubenswrapper[4730]: I0930 09:49:50.666947 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 09:49:50 crc kubenswrapper[4730]: I0930 09:49:50.666965 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 09:49:50 crc kubenswrapper[4730]: I0930 09:49:50.666978 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:50Z","lastTransitionTime":"2025-09-30T09:49:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:49:50 crc kubenswrapper[4730]: I0930 09:49:50.679534 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:50Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:50 crc kubenswrapper[4730]: I0930 09:49:50.700146 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-t2frc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"98a6f8df-1ac8-4652-8074-90cb180311ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca62256374b9da6cb7b82db90ee6d121b072440c000bc7dfb04e763224cf666d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-txcfr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-t2frc\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:50Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:50 crc kubenswrapper[4730]: I0930 09:49:50.730466 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"823c4c28-801d-421e-b15f-02a17e300753\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://566f950ac85f82874c332e01386898784e3b3e37ee3c422f8b73f6e732cd4541\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f988662b16857e74e7c88eb9e69279e7b5807aa2831cdd6ed9a7cab0a6b2494\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d99f5927ded358669fa88d47fb6a8d8bad2808bfdd07920056227828e478eda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fbe6a18bf72a902960dfc551de099c3956696ca5ac78ff5afc30a93768be323\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd7abb6daba8cf38e3d8cc62ab526acb2fe714e07477eb5d1ecf77e145cf60dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d525
f8baccabf7bc3b8067dbbc59d76851878ccc48a9fbe61c637b6fe57e01d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ad89aefc193452c8800d8680d37a4b800155f7d26f323088ad7b7e636adaa88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dd35a16a93b6c97e4e999d719b5d4dff7fa58ab8e90128e418c26e8cb0752092\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T09:49:48Z\\\",\\\"message\\\":\\\"09:49:48.535304 5985 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0930 09:49:48.535987 5985 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0930 09:49:48.536052 5985 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0930 09:49:48.536097 5985 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0930 09:49:48.536127 5985 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0930 09:49:48.536223 5985 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0930 09:49:48.536244 5985 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0930 09:49:48.536250 5985 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0930 09:49:48.536298 5985 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0930 09:49:48.536181 5985 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0930 09:49:48.536314 5985 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0930 09:49:48.536327 5985 handler.go:208] Removed *v1.Node event handler 2\\\\nI0930 09:49:48.536346 5985 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0930 09:49:48.536395 5985 handler.go:208] Removed *v1.Node event handler 7\\\\nI0930 09:49:48.536418 5985 factory.go:656] Stopping watch factory\\\\nI0930 09:49:48.536437 5985 ovnkube.go:599] Stopped ovnkube\\\\nI0930 
0\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:44Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ad89aefc193452c8800d8680d37a4b800155f7d26f323088ad7b7e636adaa88\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T09:49:50Z\\\",\\\"message\\\":\\\"55gtf\\\\nI0930 09:49:49.593780 6142 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI0930 09:49:49.593778 6142 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Switch Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:61897e97-c771-4738-8709-09636387cb00}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF0930 09:49:49.593434 6142 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:49Z is after 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59538b49ec4037e4bd1ccb035a0263f32db2504037aca2ab484dc9323f80b5d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4fcd3f6fd645e486e741ae4ddea7a8669c849d729860c651131da25638a393f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099
482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4fcd3f6fd645e486e741ae4ddea7a8669c849d729860c651131da25638a393f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-c5vmh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:50Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:50 crc kubenswrapper[4730]: I0930 09:49:50.731520 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wh45q"] Sep 30 09:49:50 crc kubenswrapper[4730]: I0930 09:49:50.731997 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wh45q" Sep 30 09:49:50 crc kubenswrapper[4730]: I0930 09:49:50.735862 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Sep 30 09:49:50 crc kubenswrapper[4730]: I0930 09:49:50.736430 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Sep 30 09:49:50 crc kubenswrapper[4730]: I0930 09:49:50.749782 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:50Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:50 crc kubenswrapper[4730]: I0930 09:49:50.762859 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:50Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:50 crc kubenswrapper[4730]: I0930 09:49:50.763571 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/c534fd9f-3767-4be6-a84e-45260fe2042f-env-overrides\") pod \"ovnkube-control-plane-749d76644c-wh45q\" (UID: \"c534fd9f-3767-4be6-a84e-45260fe2042f\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wh45q" Sep 30 09:49:50 crc kubenswrapper[4730]: I0930 09:49:50.763629 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/c534fd9f-3767-4be6-a84e-45260fe2042f-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-wh45q\" (UID: \"c534fd9f-3767-4be6-a84e-45260fe2042f\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wh45q" Sep 30 09:49:50 crc kubenswrapper[4730]: I0930 09:49:50.763661 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9n2mj\" (UniqueName: \"kubernetes.io/projected/c534fd9f-3767-4be6-a84e-45260fe2042f-kube-api-access-9n2mj\") pod \"ovnkube-control-plane-749d76644c-wh45q\" (UID: \"c534fd9f-3767-4be6-a84e-45260fe2042f\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wh45q" Sep 30 09:49:50 crc kubenswrapper[4730]: I0930 09:49:50.763696 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/c534fd9f-3767-4be6-a84e-45260fe2042f-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-wh45q\" (UID: \"c534fd9f-3767-4be6-a84e-45260fe2042f\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wh45q" Sep 30 09:49:50 crc kubenswrapper[4730]: I0930 09:49:50.770318 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:50 crc kubenswrapper[4730]: I0930 09:49:50.770358 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:50 crc kubenswrapper[4730]: I0930 09:49:50.770371 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:50 crc kubenswrapper[4730]: I0930 09:49:50.770391 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:50 crc kubenswrapper[4730]: I0930 09:49:50.770403 4730 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:50Z","lastTransitionTime":"2025-09-30T09:49:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:49:50 crc kubenswrapper[4730]: I0930 09:49:50.775846 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://96ab57c0ead54e1fe3bbf9a7fe3fc8375d1e5ebd29840f01bb2022de2f3bfdc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://373aa4d8084c958728b4a3f16056068fb328a59e55a5f50785647d78fac0e424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:50Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:50 crc kubenswrapper[4730]: I0930 09:49:50.795236 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-p4xvk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9871bed2-69f9-44f1-ab80-f8b4b9241e73\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19b882d14f4eaee6c6ad8cdd30c6e1e63eb837c7bc2dd31974c37ffbc3307186\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84aa64a838e41f5db6010538528a9d8d9221b5fe859e1afec9d9d3c71ef90eb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://84aa64a838e41f5db6010538528a9d8d9221b5fe859e1afec9d9d3c71ef90eb7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"c
ontainerID\\\":\\\"cri-o://618950d5ff8ec1ef34dc0f43e73a265aefbeb97f69a25226fb2007b4d28249a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://618950d5ff8ec1ef34dc0f43e73a265aefbeb97f69a25226fb2007b4d28249a7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://849eb24b3a7e62a7667c303069953f1391cdebf8f2888dc7d3967667c31c2a3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://849eb24b3a7e62a7667c303069953f1391cdebf8f2888dc7d3967667c31c2a3e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://685162f0c76d93c0aa78dc2c5c0ee794627f665afa60af9d9d0ae3ba8f5f7b59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://685162f0c76d93c0aa78dc2c5c0ee794627f665afa60af9d9d0ae3ba8f5f7b59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:41Z\\\"}},\\\"volumeMo
unts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c92fbb212b1ad8be488e639f7f88263b3d96ab0320da532d908b3c9df59ef2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c92fbb212b1ad8be488e639f7f88263b3d96ab0320da532d908b3c9df59ef2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://840f2a769ccdfba8933c685d07c325cf84d38bccf1d317c3b664f298aa52d878\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://840f2a769ccdfba8933c685d07c325cf84d38bccf1d317c3b664f298aa52d878\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-p4xvk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:50Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:50 crc kubenswrapper[4730]: 
I0930 09:49:50.810167 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1917c40c0ff4b4b6a2389c465a4887bad0db0b5b86a8715e0c790b3e0acfb973\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:50Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:50 crc kubenswrapper[4730]: I0930 09:49:50.825365 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e32477c6ee071d9e994825da575711cc75f177c998dd5fd170be07551b40e049\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:50Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:50 crc kubenswrapper[4730]: I0930 09:49:50.837202 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"95bd4436-8399-478d-9552-c9ba5ae8f327\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b7761b460113a85528a11a7475d5245ce3009a0dd413a8deec822c6c0eac07b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9d975\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://defac85c6bf34a3ea262b1ad293516b72b27c3f3e328f3f970694bb978bf90a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9d975\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d4zf9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:50Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:50 crc kubenswrapper[4730]: I0930 09:49:50.848389 4730 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-nw55k" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c0a99445-b60e-4c47-b4ae-00b9983d8a15\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b566e571569572866ddcfc76684ec3f1fdabc0f890aa5fd8588d136b890e88ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qdpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:40Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nw55k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:50Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:50 crc kubenswrapper[4730]: I0930 09:49:50.864592 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"14260296-7de2-4c79-8afc-9472b7132a27\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac2d1a0a531df208cd612ed7da358209e05fb5a21f658ca2d1ab62720db094e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9d7ccc34d02e190d685a52bfa797834b585dfe919856b9af92419fa4e6e3c8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e42db724f492a056ad52bf50f43c9603664c4307a8f0c01d2d83d36f644c134c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5305f42ae8fc9005ec4726813a881f7b86ab167dd060238bbde4c9af5eb2c010\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://009056b5d735b1cd232be92cb1bfae18ea1e4e6f5b06185d424bba38ef57ade7\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T09:49:30Z\\\",\\\"message\\\":\\\"W0930 09:49:19.606005 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0930 09:49:19.606415 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759225759 cert, and key in /tmp/serving-cert-1864340106/serving-signer.crt, /tmp/serving-cert-1864340106/serving-signer.key\\\\nI0930 09:49:19.940795 1 observer_polling.go:159] Starting file observer\\\\nW0930 09:49:19.943973 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0930 09:49:19.944252 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 09:49:19.946386 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1864340106/tls.crt::/tmp/serving-cert-1864340106/tls.key\\\\\\\"\\\\nF0930 09:49:30.331930 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2da24b18038fc740ead0fe99d18f91c87014548941763cf0ef15284ed5eb228\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://674fc8de60d8bca93dc0d54d5e12664780d58b8b4c5eaf314e923c2f0644e11b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminat
ed\\\":{\\\"containerID\\\":\\\"cri-o://674fc8de60d8bca93dc0d54d5e12664780d58b8b4c5eaf314e923c2f0644e11b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:16Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:50Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:50 crc kubenswrapper[4730]: I0930 09:49:50.864822 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/c534fd9f-3767-4be6-a84e-45260fe2042f-env-overrides\") pod \"ovnkube-control-plane-749d76644c-wh45q\" (UID: \"c534fd9f-3767-4be6-a84e-45260fe2042f\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wh45q" Sep 30 09:49:50 crc kubenswrapper[4730]: I0930 09:49:50.865325 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/c534fd9f-3767-4be6-a84e-45260fe2042f-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-wh45q\" (UID: \"c534fd9f-3767-4be6-a84e-45260fe2042f\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wh45q" Sep 30 09:49:50 crc kubenswrapper[4730]: I0930 09:49:50.865407 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9n2mj\" (UniqueName: \"kubernetes.io/projected/c534fd9f-3767-4be6-a84e-45260fe2042f-kube-api-access-9n2mj\") pod \"ovnkube-control-plane-749d76644c-wh45q\" (UID: \"c534fd9f-3767-4be6-a84e-45260fe2042f\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wh45q" Sep 30 09:49:50 crc kubenswrapper[4730]: I0930 09:49:50.865482 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/c534fd9f-3767-4be6-a84e-45260fe2042f-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-wh45q\" (UID: \"c534fd9f-3767-4be6-a84e-45260fe2042f\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wh45q" Sep 30 09:49:50 crc kubenswrapper[4730]: I0930 09:49:50.865798 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/c534fd9f-3767-4be6-a84e-45260fe2042f-env-overrides\") pod \"ovnkube-control-plane-749d76644c-wh45q\" (UID: \"c534fd9f-3767-4be6-a84e-45260fe2042f\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wh45q" Sep 30 09:49:50 crc kubenswrapper[4730]: I0930 09:49:50.866215 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/c534fd9f-3767-4be6-a84e-45260fe2042f-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-wh45q\" (UID: \"c534fd9f-3767-4be6-a84e-45260fe2042f\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wh45q" Sep 30 09:49:50 crc kubenswrapper[4730]: I0930 09:49:50.873520 4730 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:50 crc kubenswrapper[4730]: I0930 09:49:50.873576 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:50 crc kubenswrapper[4730]: I0930 09:49:50.873588 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:50 crc kubenswrapper[4730]: I0930 09:49:50.873643 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:50 crc kubenswrapper[4730]: I0930 09:49:50.873656 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:50Z","lastTransitionTime":"2025-09-30T09:49:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:49:50 crc kubenswrapper[4730]: I0930 09:49:50.873673 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/c534fd9f-3767-4be6-a84e-45260fe2042f-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-wh45q\" (UID: \"c534fd9f-3767-4be6-a84e-45260fe2042f\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wh45q" Sep 30 09:49:50 crc kubenswrapper[4730]: I0930 09:49:50.881411 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"17c9fe87-51d8-4e94-b0aa-10f3112788c6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a687cd54db44118582a4e1b57fc64241860ae1cfdc202ffd5dccc517967e21f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://95fa75e3d43d0b4c4f3ddcb59fc3bd9fb3a75647e5fedd92463bdfaf6c3db722\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://758b57b21f203bcb20f4e3a3b20c6de8696ba56b4e0eff6a31faaf6879a3e7b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba42c83f0075e3d0f10f24307dd574c42a505351503a8794bbcee9e986ec80e3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:16Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:50Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:50 crc kubenswrapper[4730]: I0930 09:49:50.887704 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9n2mj\" (UniqueName: \"kubernetes.io/projected/c534fd9f-3767-4be6-a84e-45260fe2042f-kube-api-access-9n2mj\") pod \"ovnkube-control-plane-749d76644c-wh45q\" (UID: \"c534fd9f-3767-4be6-a84e-45260fe2042f\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wh45q" Sep 30 09:49:50 crc kubenswrapper[4730]: I0930 09:49:50.893059 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s64nf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a7e6b85-ac68-4da9-b7eb-b5a936f639df\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a77da47c8d44b3aae7f4e27fcc4df190549c2d881e0969dbe3b8951103477ba3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6rknv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s64nf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:50Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:50 crc kubenswrapper[4730]: I0930 09:49:50.914851 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"823c4c28-801d-421e-b15f-02a17e300753\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://566f950ac85f82874c332e01386898784e3b3e37ee3c422f8b73f6e732cd4541\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f988662b16857e74e7c88eb9e69279e7b5807aa2831cdd6ed9a7cab0a6b2494\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d99f5927ded358669fa88d47fb6a8d8bad2808bfdd07920056227828e478eda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fbe6a18bf72a902960dfc551de099c3956696ca5ac78ff5afc30a93768be323\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd7abb6daba8cf38e3d8cc62ab526acb2fe714e07477eb5d1ecf77e145cf60dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d525f8baccabf7bc3b8067dbbc59d76851878ccc48a9fbe61c637b6fe57e01d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ad89aefc193452c8800d8680d37a4b800155f7d26f323088ad7b7e636adaa88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dd35a16a93b6c97e4e999d719b5d4dff7fa58ab8e90128e418c26e8cb0752092\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T09:49:48Z\\\",\\\"message\\\":\\\"09:49:48.535304 5985 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0930 09:49:48.535987 5985 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0930 09:49:48.536052 5985 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0930 09:49:48.536097 5985 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0930 09:49:48.536127 5985 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0930 09:49:48.536223 5985 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0930 09:49:48.536244 5985 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0930 09:49:48.536250 5985 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0930 09:49:48.536298 5985 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0930 09:49:48.536181 5985 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0930 09:49:48.536314 5985 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0930 09:49:48.536327 5985 handler.go:208] Removed *v1.Node event handler 2\\\\nI0930 09:49:48.536346 5985 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0930 09:49:48.536395 5985 handler.go:208] Removed *v1.Node event handler 7\\\\nI0930 09:49:48.536418 5985 factory.go:656] Stopping watch factory\\\\nI0930 09:49:48.536437 5985 ovnkube.go:599] Stopped ovnkube\\\\nI0930 0\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:44Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ad89aefc193452c8800d8680d37a4b800155f7d26f323088ad7b7e636adaa88\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T09:49:50Z\\\",\\\"message\\\":\\\"55gtf\\\\nI0930 09:49:49.593780 6142 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI0930 09:49:49.593778 6142 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Switch Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:61897e97-c771-4738-8709-09636387cb00}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF0930 09:49:49.593434 6142 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added 
to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:49Z is after \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59538b49ec4037e4bd1ccb035a0263f32db2504037aca2ab484dc9323f80b5d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4fcd3f6fd645e486e741ae4ddea7a8669c849d729860c651131da25638a393f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4fcd3f6fd645e486e741ae4ddea7a8669c849d729860c651131da25638a393f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-c5vmh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:50Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:50 crc kubenswrapper[4730]: I0930 09:49:50.931492 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:50Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:50 crc kubenswrapper[4730]: I0930 09:49:50.946865 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:50Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:50 crc kubenswrapper[4730]: I0930 09:49:50.960485 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-t2frc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"98a6f8df-1ac8-4652-8074-90cb180311ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca62256374b9da6cb7b82db90ee6d121b072440c000bc7dfb04e763224cf666d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\
"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-txcfr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-t2frc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:50Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:50 crc kubenswrapper[4730]: I0930 09:49:50.977140 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:50 crc kubenswrapper[4730]: I0930 09:49:50.977218 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:50 crc kubenswrapper[4730]: I0930 09:49:50.977236 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:50 crc kubenswrapper[4730]: I0930 09:49:50.977268 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:50 crc kubenswrapper[4730]: I0930 09:49:50.977287 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:50Z","lastTransitionTime":"2025-09-30T09:49:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:49:50 crc kubenswrapper[4730]: I0930 09:49:50.978276 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-p4xvk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9871bed2-69f9-44f1-ab80-f8b4b9241e73\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19b882d14f4eaee6c6ad8cdd30c6e1e63eb837c7bc2dd31974c37ffbc3307186\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84aa64a838e41f5db6010538528a9d8d9221b5fe859e1afec9d9d3c71ef90eb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://84aa64a838e41f5db6010538528a9d8d9221b5fe859e1afec9d9d3c71ef90eb7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://618950d5ff8ec1ef34dc0f43e73a265aefbeb97f69a25226fb2007b4d28249a7\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://618950d5ff8ec1ef34dc0f43e73a265aefbeb97f69a25226fb2007b4d28249a7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://849eb24b3a7e62a7667c303069953f1391cdebf8f2888dc7d3967667c31c2a3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://849eb24b3a7e62a7667c303069953f1391cdebf8f2888dc7d3967667c31c2a3e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://685162f0c76d93c0aa78dc2c5c0ee794627f665afa60af9d9d0ae3ba8f5f7b59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://685162f0c76d93c0aa78dc2c5c0ee794627f665afa60af9d9d0ae3ba8f5f7b59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c92fbb212b1ad8be488e639f7f88263b3d96ab0320da532d908b3c9df59ef2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c92fbb212b1ad8be488e639f7f88263b3d96ab0320da532d908b3c9df59ef2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://840f2a769ccdfba8933c685d07c325cf84d38bccf1d317c3b664f298aa52d878\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://840f2a769ccdfba8933c685d07c325cf84d38bccf1d317c3b664f298aa52d878\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-p4xvk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:50Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:50 crc kubenswrapper[4730]: I0930 09:49:50.998176 4730 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:50Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:51 crc kubenswrapper[4730]: I0930 09:49:51.015989 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://96ab57c0ead54e1fe3bbf9a7fe3fc8375d1e5ebd29840f01bb2022de2f3bfdc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://373aa4d8084c958728b4a3f16056068fb328a59e55a5f50785647d78fac0e424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:51Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:51 crc kubenswrapper[4730]: I0930 09:49:51.034039 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"95bd4436-8399-478d-9552-c9ba5ae8f327\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b7761b460113a85528a11a7475d5245ce3009a0dd413a8deec822c6c0eac07b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9d975\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://defac85c6bf34a3ea262b1ad293516b72b27c3f3e328f3f970694bb978bf90a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9d975\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d4zf9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:51Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:51 crc kubenswrapper[4730]: I0930 09:49:51.046776 4730 util.go:30] "No sandbox for pod can 
be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wh45q" Sep 30 09:49:51 crc kubenswrapper[4730]: I0930 09:49:51.052636 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-nw55k" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c0a99445-b60e-4c47-b4ae-00b9983d8a15\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b566e571569572866ddcfc76684ec3f1fdabc0f890aa5fd8588d136b890e88ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qdpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:40Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nw55k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:51Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:51 crc kubenswrapper[4730]: W0930 09:49:51.066818 4730 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc534fd9f_3767_4be6_a84e_45260fe2042f.slice/crio-5fbae81cbb75d12afd9eca4005d8a5d0bbb776646331d35d05ba87510b0dba83 WatchSource:0}: Error finding container 5fbae81cbb75d12afd9eca4005d8a5d0bbb776646331d35d05ba87510b0dba83: Status 404 returned error can't find the container with id 5fbae81cbb75d12afd9eca4005d8a5d0bbb776646331d35d05ba87510b0dba83 Sep 30 09:49:51 crc kubenswrapper[4730]: I0930 09:49:51.070001 4730 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wh45q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c534fd9f-3767-4be6-a84e-45260fe2042f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:50Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:50Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n2mj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n2mj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:50Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wh45q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:51Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:51 crc kubenswrapper[4730]: I0930 09:49:51.079978 4730 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:51 crc kubenswrapper[4730]: I0930 09:49:51.080051 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:51 crc kubenswrapper[4730]: I0930 09:49:51.080070 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:51 crc kubenswrapper[4730]: I0930 09:49:51.080094 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:51 crc kubenswrapper[4730]: I0930 09:49:51.080107 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:51Z","lastTransitionTime":"2025-09-30T09:49:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:49:51 crc kubenswrapper[4730]: I0930 09:49:51.091474 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"14260296-7de2-4c79-8afc-9472b7132a27\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac2d1a0a531df208cd612ed7da358209e05fb5a21f658ca2d1ab62720db094e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9d7ccc34d02e190d685a52bfa797834b585dfe919856b9af92419fa4e6e3c8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controlle
r\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e42db724f492a056ad52bf50f43c9603664c4307a8f0c01d2d83d36f644c134c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5305f42ae8fc9005ec4726813a881f7b86ab167dd060238bbde4c9af5eb2c010\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://009056b5d735b1cd232be92cb1bfae18ea1e4e6f5b06185d424bba38ef57ade7\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T09:49:30Z\\\",\\\"message\\\":\\\"W0930 09:49:19.606005 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0930 09:49:19.606415 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759225759 cert, and key in /tmp/serving-cert-1864340106/serving-signer.crt, /tmp/serving-cert-1864340106/serving-signer.key\\\\nI0930 09:49:19.940795 1 observer_polling.go:159] Starting file observer\\\\nW0930 09:49:19.943973 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0930 09:49:19.944252 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 09:49:19.946386 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1864340106/tls.crt::/tmp/serving-cert-1864340106/tls.key\\\\\\\"\\\\nF0930 09:49:30.331930 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2da24b18038fc740ead0fe99d18f91c87014548941763cf0ef15284ed5eb228\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://674fc8de60d8bca93dc0d54d5e12664780d58b8b4c5eaf314e923c2f0644e11b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://674fc8de60d8bca93dc0d54d5e12664780d58b8b4c5eaf314e923c2f0644e11b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:16Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:51Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:51 crc kubenswrapper[4730]: I0930 09:49:51.106024 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"17c9fe87-51d8-4e94-b0aa-10f3112788c6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a687cd54db44118582a4e1b57fc64241860ae1cfdc202ffd5dccc517967e21f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://95fa75e3d43d0b4c4f3ddcb59fc3bd9fb3a75647e5fedd92463bdfaf6c3db722\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://758b57b21f203bcb20f4e3a3b20c6de8696ba56b4e0eff6a31faaf6879a3e7b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba42c83f0075e3d0f10f24307dd574c42a505351503a8794bbcee9e986ec80e3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:16Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:51Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:51 crc kubenswrapper[4730]: I0930 09:49:51.122287 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1917c40c0ff4b4b6a2389c465a4887bad0db0b5b86a8715e0c790b3e0acfb973\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:51Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:51 crc kubenswrapper[4730]: I0930 09:49:51.138953 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e32477c6ee071d9e994825da575711cc75f177c998dd5fd170be07551b40e049\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:51Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:51 crc kubenswrapper[4730]: I0930 09:49:51.151394 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s64nf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a7e6b85-ac68-4da9-b7eb-b5a936f639df\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a77da47c8d44b3aae7f4e27fcc4df190549c2d881e0969dbe3b8951103477ba3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6rknv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s64nf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:51Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:51 crc kubenswrapper[4730]: I0930 09:49:51.183240 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:51 crc kubenswrapper[4730]: I0930 09:49:51.183292 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:51 crc kubenswrapper[4730]: I0930 09:49:51.183308 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:51 crc kubenswrapper[4730]: I0930 09:49:51.183327 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:51 crc kubenswrapper[4730]: I0930 09:49:51.183339 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:51Z","lastTransitionTime":"2025-09-30T09:49:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:49:51 crc kubenswrapper[4730]: I0930 09:49:51.288087 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:51 crc kubenswrapper[4730]: I0930 09:49:51.288164 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:51 crc kubenswrapper[4730]: I0930 09:49:51.288184 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:51 crc kubenswrapper[4730]: I0930 09:49:51.288215 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:51 crc kubenswrapper[4730]: I0930 09:49:51.288250 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:51Z","lastTransitionTime":"2025-09-30T09:49:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:49:51 crc kubenswrapper[4730]: I0930 09:49:51.392176 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:51 crc kubenswrapper[4730]: I0930 09:49:51.392242 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:51 crc kubenswrapper[4730]: I0930 09:49:51.392257 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:51 crc kubenswrapper[4730]: I0930 09:49:51.392281 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:51 crc kubenswrapper[4730]: I0930 09:49:51.392296 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:51Z","lastTransitionTime":"2025-09-30T09:49:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:49:51 crc kubenswrapper[4730]: I0930 09:49:51.467527 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-dqqrb"] Sep 30 09:49:51 crc kubenswrapper[4730]: I0930 09:49:51.468319 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-dqqrb" Sep 30 09:49:51 crc kubenswrapper[4730]: E0930 09:49:51.468444 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-dqqrb" podUID="be86a67e-c663-4551-9ecf-a8c2a9801cd7" Sep 30 09:49:51 crc kubenswrapper[4730]: I0930 09:49:51.492090 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"14260296-7de2-4c79-8afc-9472b7132a27\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac2d1a0a531df208cd612ed7da358209e05fb5a21f658ca2d1ab62720db094e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9d7ccc34d02e190d685a52bfa797834b585dfe919856b9af92419fa4e6e3c8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e42db724f492a056ad52bf50f43c9603664c4307a8f0c01d2d83d36f644c134c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"
name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5305f42ae8fc9005ec4726813a881f7b86ab167dd060238bbde4c9af5eb2c010\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://009056b5d735b1cd232be92cb1bfae18ea1e4e6f5b06185d424bba38ef57ade7\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T09:49:30Z\\\",\\\"message\\\":\\\"W0930 09:49:19.606005 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0930 09:49:19.606415 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759225759 cert, and key in /tmp/serving-cert-1864340106/serving-signer.crt, /tmp/serving-cert-1864340106/serving-signer.key\\\\nI0930 09:49:19.940795 1 observer_polling.go:159] Starting file observer\\\\nW0930 09:49:19.943973 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0930 09:49:19.944252 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 09:49:19.946386 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1864340106/tls.crt::/tmp/serving-cert-1864340106/tls.key\\\\\\\"\\\\nF0930 09:49:30.331930 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2da24b18038fc740ead0fe99d18f91c87014548941763cf0ef15284ed5eb228\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://674fc8de60d8bca93dc0d54d5e12664780d58b8b4c5eaf314e923c2f0644e11b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://674fc8de60d8bca93dc0d54d5e12664780d58b8b4c5eaf314e923c2f0644e11b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:16Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:51Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:51 crc kubenswrapper[4730]: I0930 09:49:51.495024 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:51 crc kubenswrapper[4730]: I0930 09:49:51.495071 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:51 crc kubenswrapper[4730]: I0930 09:49:51.495090 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:51 crc kubenswrapper[4730]: I0930 09:49:51.495117 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:51 crc kubenswrapper[4730]: I0930 09:49:51.495136 4730 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:51Z","lastTransitionTime":"2025-09-30T09:49:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:49:51 crc kubenswrapper[4730]: I0930 09:49:51.507667 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"17c9fe87-51d8-4e94-b0aa-10f3112788c6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a687cd54db44118582a4e1b57fc64241860ae1cfdc202ffd5dccc517967e21f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://95fa75e3d43d0b4c4f3ddcb59fc3bd9fb3a75647e5fedd92463bdfaf6c3db722\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://758b57b21f203bcb20f4e3a3b20c6de8696ba56b4e0eff6a31faaf6879a3e7b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastS
tate\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba42c83f0075e3d0f10f24307dd574c42a505351503a8794bbcee9e986ec80e3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:16Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:51Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:51 crc kubenswrapper[4730]: I0930 09:49:51.525454 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1917c40c0ff4b4b6a2389c465a4887bad0db0b5b86a8715e0c790b3e0acfb973\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:51Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:51 crc kubenswrapper[4730]: I0930 09:49:51.542101 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e32477c6ee071d9e994825da575711cc75f177c998dd5fd170be07551b40e049\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:51Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:51 crc kubenswrapper[4730]: I0930 09:49:51.559907 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"95bd4436-8399-478d-9552-c9ba5ae8f327\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b7761b460113a85528a11a7475d5245ce3009a0dd413a8deec822c6c0eac07b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9d975\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://defac85c6bf34a3ea262b1ad293516b72b27c3f3e328f3f970694bb978bf90a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9d975\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d4zf9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:51Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:51 crc kubenswrapper[4730]: I0930 09:49:51.572369 4730 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-nw55k" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c0a99445-b60e-4c47-b4ae-00b9983d8a15\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b566e571569572866ddcfc76684ec3f1fdabc0f890aa5fd8588d136b890e88ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qdpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:40Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nw55k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:51Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:51 crc kubenswrapper[4730]: I0930 09:49:51.573686 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/be86a67e-c663-4551-9ecf-a8c2a9801cd7-metrics-certs\") pod \"network-metrics-daemon-dqqrb\" (UID: \"be86a67e-c663-4551-9ecf-a8c2a9801cd7\") " pod="openshift-multus/network-metrics-daemon-dqqrb" Sep 30 09:49:51 crc kubenswrapper[4730]: I0930 09:49:51.573737 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q9sm8\" (UniqueName: \"kubernetes.io/projected/be86a67e-c663-4551-9ecf-a8c2a9801cd7-kube-api-access-q9sm8\") pod \"network-metrics-daemon-dqqrb\" (UID: \"be86a67e-c663-4551-9ecf-a8c2a9801cd7\") " pod="openshift-multus/network-metrics-daemon-dqqrb" Sep 30 09:49:51 crc kubenswrapper[4730]: I0930 09:49:51.585423 4730 
status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wh45q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c534fd9f-3767-4be6-a84e-45260fe2042f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:50Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:50Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n2mj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n2mj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:50Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wh45q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:51Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:51 crc kubenswrapper[4730]: I0930 
09:49:51.597383 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:51 crc kubenswrapper[4730]: I0930 09:49:51.597453 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:51 crc kubenswrapper[4730]: I0930 09:49:51.597481 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:51 crc kubenswrapper[4730]: I0930 09:49:51.597542 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:51 crc kubenswrapper[4730]: I0930 09:49:51.597571 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:51Z","lastTransitionTime":"2025-09-30T09:49:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:49:51 crc kubenswrapper[4730]: I0930 09:49:51.598739 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s64nf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a7e6b85-ac68-4da9-b7eb-b5a936f639df\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a77da47c8d44b3aae7f4e27fcc4df190549c2d881e0969dbe3b8951103477ba3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6rknv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s64nf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:51Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:51 crc kubenswrapper[4730]: I0930 09:49:51.615750 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:51Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:51 crc kubenswrapper[4730]: I0930 09:49:51.630367 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:51Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:51 crc kubenswrapper[4730]: I0930 09:49:51.645949 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-t2frc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"98a6f8df-1ac8-4652-8074-90cb180311ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca62256374b9da6cb7b82db90ee6d121b072440c000bc7dfb04e763224cf666d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-txcfr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-t2frc\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:51Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:51 crc kubenswrapper[4730]: I0930 09:49:51.668937 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-c5vmh_823c4c28-801d-421e-b15f-02a17e300753/ovnkube-controller/1.log" Sep 30 09:49:51 crc kubenswrapper[4730]: I0930 09:49:51.670840 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"823c4c28-801d-421e-b15f-02a17e300753\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://566f950ac85f82874c332e01386898784e3b3e37ee3c422f8b73f6e732cd4541\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f988662b16857e74e7c88eb9e69279e7b5807aa2831cdd6ed9a7cab0a6b2494\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:40Z\\\"}},\\\
"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d99f5927ded358669fa88d47fb6a8d8bad2808bfdd07920056227828e478eda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fbe6a18bf72a902960dfc551de099c3956696ca5ac78ff5afc30a93768be323\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd7abb6daba8cf38e3d8cc62ab526acb2fe714e07477eb5d1ecf77e145cf60dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"r
un-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d525f8baccabf7bc3b8067dbbc59d76851878ccc48a9fbe61c637b6fe57e01d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ad89aefc193452c8800d8680d37a4b800155f7d26f323088ad7b7e636adaa88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dd35a16a93b6c97e4e999d719b5d4dff7fa58ab8e90128e418c26e8cb0752092\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T09:49:48Z\\\",\\\"message\\\":\\\"09:49:48.535304 5985 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0930 09:49:48.535987 5985 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0930 09:49:48.536052 5985 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0930 09:49:48.536097 5985 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0930 09:49:48.536127 5985 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0930 09:49:48.536223 5985 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0930 09:49:48.536244 5985 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0930 09:49:48.536250 5985 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0930 09:49:48.536298 5985 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0930 09:49:48.536181 5985 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0930 09:49:48.536314 5985 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0930 09:49:48.536327 5985 handler.go:208] Removed *v1.Node event handler 2\\\\nI0930 09:49:48.536346 5985 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0930 09:49:48.536395 5985 handler.go:208] Removed *v1.Node event handler 7\\\\nI0930 09:49:48.536418 
5985 factory.go:656] Stopping watch factory\\\\nI0930 09:49:48.536437 5985 ovnkube.go:599] Stopped ovnkube\\\\nI0930 0\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:44Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ad89aefc193452c8800d8680d37a4b800155f7d26f323088ad7b7e636adaa88\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T09:49:50Z\\\",\\\"message\\\":\\\"55gtf\\\\nI0930 09:49:49.593780 6142 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI0930 09:49:49.593778 6142 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Switch Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:61897e97-c771-4738-8709-09636387cb00}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF0930 09:49:49.593434 6142 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:49Z is after 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59538b49ec4037e4bd1ccb035a0263f32db2504037aca2ab484dc9323f80b5d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4fcd3f6fd645e486e741ae4ddea7a8669c849d729860c651131da25638a393f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099
482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4fcd3f6fd645e486e741ae4ddea7a8669c849d729860c651131da25638a393f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-c5vmh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:51Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:51 crc kubenswrapper[4730]: I0930 09:49:51.674244 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/be86a67e-c663-4551-9ecf-a8c2a9801cd7-metrics-certs\") pod \"network-metrics-daemon-dqqrb\" (UID: \"be86a67e-c663-4551-9ecf-a8c2a9801cd7\") " pod="openshift-multus/network-metrics-daemon-dqqrb" Sep 30 09:49:51 crc kubenswrapper[4730]: I0930 09:49:51.674286 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q9sm8\" (UniqueName: \"kubernetes.io/projected/be86a67e-c663-4551-9ecf-a8c2a9801cd7-kube-api-access-q9sm8\") pod \"network-metrics-daemon-dqqrb\" (UID: \"be86a67e-c663-4551-9ecf-a8c2a9801cd7\") " pod="openshift-multus/network-metrics-daemon-dqqrb" Sep 30 09:49:51 crc kubenswrapper[4730]: E0930 09:49:51.674435 4730 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Sep 30 09:49:51 crc kubenswrapper[4730]: E0930 09:49:51.674521 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/be86a67e-c663-4551-9ecf-a8c2a9801cd7-metrics-certs podName:be86a67e-c663-4551-9ecf-a8c2a9801cd7 nodeName:}" failed. No retries permitted until 2025-09-30 09:49:52.174495514 +0000 UTC m=+36.507755547 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/be86a67e-c663-4551-9ecf-a8c2a9801cd7-metrics-certs") pod "network-metrics-daemon-dqqrb" (UID: "be86a67e-c663-4551-9ecf-a8c2a9801cd7") : object "openshift-multus"/"metrics-daemon-secret" not registered Sep 30 09:49:51 crc kubenswrapper[4730]: I0930 09:49:51.675140 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wh45q" event={"ID":"c534fd9f-3767-4be6-a84e-45260fe2042f","Type":"ContainerStarted","Data":"f6b629ad4121cddae85f73b40e36013ebfd8d41e9f990dba444bdd2e51d8f9d4"} Sep 30 09:49:51 crc kubenswrapper[4730]: I0930 09:49:51.675224 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wh45q" event={"ID":"c534fd9f-3767-4be6-a84e-45260fe2042f","Type":"ContainerStarted","Data":"c1daa82f1b7e2909d26e662848abe7adaa29361f528ea2efac340bfd685bb155"} Sep 30 09:49:51 crc kubenswrapper[4730]: I0930 09:49:51.675242 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wh45q" event={"ID":"c534fd9f-3767-4be6-a84e-45260fe2042f","Type":"ContainerStarted","Data":"5fbae81cbb75d12afd9eca4005d8a5d0bbb776646331d35d05ba87510b0dba83"} Sep 30 09:49:51 crc kubenswrapper[4730]: I0930 09:49:51.685505 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-dqqrb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be86a67e-c663-4551-9ecf-a8c2a9801cd7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9sm8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9sm8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:51Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-dqqrb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:51Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:51 crc kubenswrapper[4730]: I0930 09:49:51.691184 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q9sm8\" (UniqueName: \"kubernetes.io/projected/be86a67e-c663-4551-9ecf-a8c2a9801cd7-kube-api-access-q9sm8\") pod \"network-metrics-daemon-dqqrb\" (UID: \"be86a67e-c663-4551-9ecf-a8c2a9801cd7\") " pod="openshift-multus/network-metrics-daemon-dqqrb" Sep 30 09:49:51 crc kubenswrapper[4730]: I0930 09:49:51.700259 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:51Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:51 crc kubenswrapper[4730]: I0930 09:49:51.701057 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:51 crc kubenswrapper[4730]: I0930 09:49:51.701124 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:51 crc kubenswrapper[4730]: I0930 09:49:51.701140 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:51 crc kubenswrapper[4730]: I0930 09:49:51.701163 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:51 crc kubenswrapper[4730]: I0930 09:49:51.701179 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:51Z","lastTransitionTime":"2025-09-30T09:49:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:49:51 crc kubenswrapper[4730]: I0930 09:49:51.716728 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://96ab57c0ead54e1fe3bbf9a7fe3fc8375d1e5ebd29840f01bb2022de2f3bfdc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://373aa4d8084c958728b4a3f16056068fb328a59e55a5f50785647d78fac0e424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:51Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:51 crc kubenswrapper[4730]: I0930 09:49:51.736759 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-p4xvk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9871bed2-69f9-44f1-ab80-f8b4b9241e73\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19b882d14f4eaee6c6ad8cdd30c6e1e63eb837c7bc2dd31974c37ffbc3307186\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84aa64a838e41f5db6010538528a9d8d9221b5fe859e1afec9d9d3c71ef90eb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://84aa64a838e41f5db6010538528a9d8d9221b5fe859e1afec9d9d3c71ef90eb7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://618950d5ff8ec1ef34dc0f43e73a265aefbeb97f69a25226fb2007b4d28249a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://618950d5ff8ec1ef34dc0f43e73a265aefbeb97f69a25226fb2007b4d28249a7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://849eb24b3a7e62a7667c303069953f1391cdebf8f2888dc7d3967667c31c2a3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://849eb24b3a7e62a7667c303069953f1391cdebf8f2888dc7d3967667c31c2a3e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://685162f0c76d93c0aa78dc2c5c0ee794627f665afa60af9d9d0ae3ba8f5f7b59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://685162f0c76d93c0aa78dc2c5c0ee794627f665afa60af9d9d0ae3ba8f5f7b59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c92fbb212b1ad8be488e639f7f88263b3d96ab0320da532d908b3c9df59ef2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c92fbb212b1ad8be488e639f7f88263b3d96ab0320da532d908b3c9df59ef2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://840f2a769ccdfba8933c685d07c325cf84d38bccf1d317c3b664f298aa52d878\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://840f2a769ccdfba8933c685d07c325cf84d38bccf1d317c3b664f298aa52d878\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-p4xvk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:51Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:51 crc kubenswrapper[4730]: I0930 09:49:51.754200 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:51Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:51 crc kubenswrapper[4730]: I0930 09:49:51.768535 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://96ab57c0ead54e1fe3bbf9a7fe3fc8375d1e5ebd29840f01bb2022de2f3bfdc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://373aa4d8084c958728b4a3f16056068fb328a59e55a5f50785647d78fac0e424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:51Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:51 crc kubenswrapper[4730]: I0930 09:49:51.784199 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-p4xvk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9871bed2-69f9-44f1-ab80-f8b4b9241e73\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19b882d14f4eaee6c6ad8cdd30c6e1e63eb837c7bc2dd31974c37ffbc3307186\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84aa64a838e41f5db6010538528a9d8d9221b5fe859e1afec9d9d3c71ef90eb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://84aa64a838e41f5db6010538528a9d8d9221b5fe859e1afec9d9d3c71ef90eb7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://618950d5ff8ec1ef34dc0f43e73a265aefbeb97f69a25226fb2007b4d28249a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://618950d5ff8ec1ef34dc0f43e73a265aefbeb97f69a25226fb2007b4d28249a7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://849eb24b3a7e62a7667c303069953f1391cdebf8f2888dc7d3967667c31c2a3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://849eb24b3a7e62a7667c303069953f1391cdebf8f2888dc7d3967667c31c2a3e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://685162f0c76d93c0aa78dc2c5c0ee794627f665afa60af9d9d0ae3ba8f5f7b59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://685162f0c76d93c0aa78dc2c5c0ee794627f665afa60af9d9d0ae3ba8f5f7b59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c92fbb212b1ad8be488e639f7f88263b3d96ab0320da532d908b3c9df59ef2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c92fbb212b1ad8be488e639f7f88263b3d96ab0320da532d908b3c9df59ef2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://840f2a769ccdfba8933c685d07c325cf84d38bccf1d317c3b664f298aa52d878\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://840f2a769ccdfba8933c685d07c325cf84d38bccf1d317c3b664f298aa52d878\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-p4xvk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:51Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:51 crc kubenswrapper[4730]: I0930 09:49:51.796634 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-nw55k" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c0a99445-b60e-4c47-b4ae-00b9983d8a15\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b566e571569572866ddcfc76684ec3f1fdabc0f890aa5fd8588d136b890e88ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qdpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:40Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nw55k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:51Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:51 crc kubenswrapper[4730]: I0930 09:49:51.804770 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:51 crc kubenswrapper[4730]: I0930 09:49:51.804822 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:51 crc kubenswrapper[4730]: I0930 09:49:51.804834 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:51 crc kubenswrapper[4730]: I0930 09:49:51.804850 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:51 crc kubenswrapper[4730]: I0930 09:49:51.804862 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:51Z","lastTransitionTime":"2025-09-30T09:49:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:49:51 crc kubenswrapper[4730]: I0930 09:49:51.811886 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wh45q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c534fd9f-3767-4be6-a84e-45260fe2042f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c1daa82f1b7e2909d26e662848abe7adaa29361f528ea2efac340bfd685bb155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n2mj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6b629ad4121cddae85f73b40e36013ebfd8d41e9f990dba444bdd2e51d8f9d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n2mj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:50Z\\\"}}\" 
for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wh45q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:51Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:51 crc kubenswrapper[4730]: I0930 09:49:51.837526 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"14260296-7de2-4c79-8afc-9472b7132a27\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac2d1a0a531df208cd612ed7da358209e05fb5a21f658ca2d1ab62720db094e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9d7ccc34d02e190d685a52bfa797834b585dfe919856b9af92419fa4e6e3c8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e42db724f492a056ad52bf50f43c9603664c4307a8f0c01d2d83d36f644c134c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"
lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5305f42ae8fc9005ec4726813a881f7b86ab167dd060238bbde4c9af5eb2c010\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://009056b5d735b1cd232be92cb1bfae18ea1e4e6f5b06185d424bba38ef57ade7\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T09:49:30Z\\\",\\\"message\\\":\\\"W0930 09:49:19.606005 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0930 09:49:19.606415 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759225759 cert, and key in /tmp/serving-cert-1864340106/serving-signer.crt, /tmp/serving-cert-1864340106/serving-signer.key\\\\nI0930 09:49:19.940795 1 observer_polling.go:159] Starting file observer\\\\nW0930 09:49:19.943973 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0930 09:49:19.944252 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 09:49:19.946386 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1864340106/tls.crt::/tmp/serving-cert-1864340106/tls.key\\\\\\\"\\\\nF0930 09:49:30.331930 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2da24b18038fc740ead0fe99d18f91c87014548941763cf0ef15284ed5eb228\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://674fc8de60d8bca93dc0d54d5e12664780d58b8b4c5eaf314e923c2f0644e11b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://674fc8de60d8bca93dc0d54d5e12664780d58b8b4c5eaf314e923c2f0644e11b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:16Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:51Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:51 crc kubenswrapper[4730]: I0930 09:49:51.852855 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"17c9fe87-51d8-4e94-b0aa-10f3112788c6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a687cd54db44118582a4e1b57fc64241860ae1cfdc202ffd5dccc517967e21f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://95fa75e3d43d0b4c4f3ddcb59fc3bd9fb3a75647e5fedd92463bdfaf6c3db722\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://758b57b21f203bcb20f4e3a3b20c6de8696ba56b4e0eff6a31faaf6879a3e7b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba42c83f0075e3d0f10f24307dd574c42a505351503a8794bbcee9e986ec80e3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:16Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:51Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:51 crc kubenswrapper[4730]: I0930 09:49:51.868795 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1917c40c0ff4b4b6a2389c465a4887bad0db0b5b86a8715e0c790b3e0acfb973\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:51Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:51 crc kubenswrapper[4730]: I0930 09:49:51.882359 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e32477c6ee071d9e994825da575711cc75f177c998dd5fd170be07551b40e049\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:51Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:51 crc kubenswrapper[4730]: I0930 09:49:51.894343 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"95bd4436-8399-478d-9552-c9ba5ae8f327\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b7761b460113a85528a11a7475d5245ce3009a0dd413a8deec822c6c0eac07b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9d975\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://defac85c6bf34a3ea262b1ad293516b72b27c3f3e328f3f970694bb978bf90a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9d975\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d4zf9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:51Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:51 crc kubenswrapper[4730]: I0930 09:49:51.905917 4730 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-dns/node-resolver-s64nf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a7e6b85-ac68-4da9-b7eb-b5a936f639df\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a77da47c8d44b3aae7f4e27fcc4df190549c2d881e0969dbe3b8951103477ba3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6rknv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s64nf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:51Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:51 crc kubenswrapper[4730]: I0930 09:49:51.908344 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:51 crc kubenswrapper[4730]: I0930 09:49:51.908379 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:51 crc kubenswrapper[4730]: I0930 09:49:51.908391 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:51 crc kubenswrapper[4730]: I0930 09:49:51.908411 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:51 crc kubenswrapper[4730]: I0930 09:49:51.908425 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:51Z","lastTransitionTime":"2025-09-30T09:49:51Z","reason":"KubeletNotReady","message":"container runtime network not 
ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:49:51 crc kubenswrapper[4730]: I0930 09:49:51.918004 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-dqqrb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be86a67e-c663-4551-9ecf-a8c2a9801cd7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9sm8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9sm8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:51Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-dqqrb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:51Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:51 crc kubenswrapper[4730]: I0930 09:49:51.931297 4730 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:51Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:51 crc kubenswrapper[4730]: I0930 09:49:51.942270 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:51Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:51 crc kubenswrapper[4730]: I0930 09:49:51.954039 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-t2frc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"98a6f8df-1ac8-4652-8074-90cb180311ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca62256374b9da6cb7b82db90ee6d121b072440c000bc7dfb04e763224cf666d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"syste
m-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-txcfr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-t2frc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:51Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:51 crc kubenswrapper[4730]: I0930 09:49:51.973334 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"823c4c28-801d-421e-b15f-02a17e300753\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://566f950ac85f82874c332e01386898784e3b3e37ee3c422f8b73f6e732cd4541\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f988662b16857e74e7c88eb9e69279e7b5807aa2831cdd6ed9a7cab0a6b2494\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d99f5927ded358669fa88d47fb6a8d8bad2808bfdd07920056227828e478eda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fbe6a18bf72a902960dfc551de099c3956696ca5ac78ff5afc30a93768be323\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd7abb6daba8cf38e3d8cc62ab526acb2fe714e07477eb5d1ecf77e145cf60dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d525f8baccabf7bc3b8067dbbc59d76851878ccc48a9fbe61c637b6fe57e01d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ad89aefc193452c8800d8680d37a4b800155f7d
26f323088ad7b7e636adaa88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dd35a16a93b6c97e4e999d719b5d4dff7fa58ab8e90128e418c26e8cb0752092\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T09:49:48Z\\\",\\\"message\\\":\\\"09:49:48.535304 5985 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0930 09:49:48.535987 5985 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0930 09:49:48.536052 5985 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0930 09:49:48.536097 5985 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0930 09:49:48.536127 5985 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0930 09:49:48.536223 5985 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0930 09:49:48.536244 5985 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0930 09:49:48.536250 5985 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0930 09:49:48.536298 5985 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0930 09:49:48.536181 5985 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0930 09:49:48.536314 5985 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0930 09:49:48.536327 5985 handler.go:208] Removed *v1.Node event handler 2\\\\nI0930 09:49:48.536346 5985 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0930 09:49:48.536395 5985 handler.go:208] Removed *v1.Node event handler 7\\\\nI0930 09:49:48.536418 5985 factory.go:656] Stopping watch factory\\\\nI0930 09:49:48.536437 5985 ovnkube.go:599] Stopped ovnkube\\\\nI0930 0\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:44Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ad89aefc193452c8800d8680d37a4b800155f7d26f323088ad7b7e636adaa88\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T09:49:50Z\\\",\\\"message\\\":\\\"55gtf\\\\nI0930 09:49:49.593780 6142 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI0930 09:49:49.593778 6142 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Switch Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:61897e97-c771-4738-8709-09636387cb00}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF0930 09:49:49.593434 6142 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook 
\\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:49Z is after \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59538b49ec4037e4bd1ccb035a0263f32db2504037aca2ab484dc9323f80b5d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"host
IPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4fcd3f6fd645e486e741ae4ddea7a8669c849d729860c651131da25638a393f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4fcd3f6fd645e486e741ae4ddea7a8669c849d729860c651131da25638a393f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-c5vmh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:51Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:52 crc kubenswrapper[4730]: I0930 09:49:52.012145 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:52 crc kubenswrapper[4730]: I0930 09:49:52.012201 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:52 crc kubenswrapper[4730]: I0930 09:49:52.012219 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:52 crc kubenswrapper[4730]: I0930 09:49:52.012244 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:52 crc kubenswrapper[4730]: I0930 09:49:52.012264 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:52Z","lastTransitionTime":"2025-09-30T09:49:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:49:52 crc kubenswrapper[4730]: I0930 09:49:52.079234 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 09:49:52 crc kubenswrapper[4730]: I0930 09:49:52.079450 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 09:49:52 crc kubenswrapper[4730]: I0930 09:49:52.079491 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 09:49:52 crc kubenswrapper[4730]: E0930 09:49:52.079532 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 09:50:08.079489048 +0000 UTC m=+52.412749131 (durationBeforeRetry 16s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 09:49:52 crc kubenswrapper[4730]: E0930 09:49:52.079663 4730 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 30 09:49:52 crc kubenswrapper[4730]: E0930 09:49:52.079701 4730 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Sep 30 09:49:52 crc kubenswrapper[4730]: E0930 09:49:52.079737 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-30 09:50:08.079721424 +0000 UTC m=+52.412981417 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 30 09:49:52 crc kubenswrapper[4730]: E0930 09:49:52.079782 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-30 09:50:08.079762375 +0000 UTC m=+52.413022398 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Sep 30 09:49:52 crc kubenswrapper[4730]: I0930 09:49:52.115289 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:52 crc kubenswrapper[4730]: I0930 09:49:52.115347 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:52 crc kubenswrapper[4730]: I0930 09:49:52.115365 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:52 crc kubenswrapper[4730]: I0930 09:49:52.115389 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:52 crc kubenswrapper[4730]: I0930 09:49:52.115405 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:52Z","lastTransitionTime":"2025-09-30T09:49:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:49:52 crc kubenswrapper[4730]: I0930 09:49:52.180887 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 09:49:52 crc kubenswrapper[4730]: E0930 09:49:52.181126 4730 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 30 09:49:52 crc kubenswrapper[4730]: E0930 09:49:52.181168 4730 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 30 09:49:52 crc kubenswrapper[4730]: E0930 09:49:52.181183 4730 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 09:49:52 crc kubenswrapper[4730]: E0930 09:49:52.181257 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-09-30 09:50:08.181233243 +0000 UTC m=+52.514493236 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 09:49:52 crc kubenswrapper[4730]: I0930 09:49:52.181294 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/be86a67e-c663-4551-9ecf-a8c2a9801cd7-metrics-certs\") pod \"network-metrics-daemon-dqqrb\" (UID: \"be86a67e-c663-4551-9ecf-a8c2a9801cd7\") " pod="openshift-multus/network-metrics-daemon-dqqrb" Sep 30 09:49:52 crc kubenswrapper[4730]: I0930 09:49:52.181329 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 09:49:52 crc kubenswrapper[4730]: E0930 09:49:52.181409 4730 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 30 09:49:52 crc kubenswrapper[4730]: E0930 09:49:52.181420 4730 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 30 09:49:52 crc kubenswrapper[4730]: E0930 09:49:52.181429 4730 projected.go:194] Error preparing data for projected volume 
kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 09:49:52 crc kubenswrapper[4730]: E0930 09:49:52.181453 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-09-30 09:50:08.181445898 +0000 UTC m=+52.514705881 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 09:49:52 crc kubenswrapper[4730]: E0930 09:49:52.181507 4730 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Sep 30 09:49:52 crc kubenswrapper[4730]: E0930 09:49:52.181674 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/be86a67e-c663-4551-9ecf-a8c2a9801cd7-metrics-certs podName:be86a67e-c663-4551-9ecf-a8c2a9801cd7 nodeName:}" failed. No retries permitted until 2025-09-30 09:49:53.181599272 +0000 UTC m=+37.514859265 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/be86a67e-c663-4551-9ecf-a8c2a9801cd7-metrics-certs") pod "network-metrics-daemon-dqqrb" (UID: "be86a67e-c663-4551-9ecf-a8c2a9801cd7") : object "openshift-multus"/"metrics-daemon-secret" not registered Sep 30 09:49:52 crc kubenswrapper[4730]: I0930 09:49:52.218692 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:52 crc kubenswrapper[4730]: I0930 09:49:52.218747 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:52 crc kubenswrapper[4730]: I0930 09:49:52.218761 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:52 crc kubenswrapper[4730]: I0930 09:49:52.218783 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:52 crc kubenswrapper[4730]: I0930 09:49:52.218797 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:52Z","lastTransitionTime":"2025-09-30T09:49:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:49:52 crc kubenswrapper[4730]: I0930 09:49:52.321791 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:52 crc kubenswrapper[4730]: I0930 09:49:52.321846 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:52 crc kubenswrapper[4730]: I0930 09:49:52.321859 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:52 crc kubenswrapper[4730]: I0930 09:49:52.321881 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:52 crc kubenswrapper[4730]: I0930 09:49:52.321894 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:52Z","lastTransitionTime":"2025-09-30T09:49:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:49:52 crc kubenswrapper[4730]: I0930 09:49:52.380151 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 09:49:52 crc kubenswrapper[4730]: I0930 09:49:52.380247 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 09:49:52 crc kubenswrapper[4730]: I0930 09:49:52.380167 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 09:49:52 crc kubenswrapper[4730]: E0930 09:49:52.380346 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 09:49:52 crc kubenswrapper[4730]: E0930 09:49:52.380453 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 09:49:52 crc kubenswrapper[4730]: E0930 09:49:52.380586 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 09:49:52 crc kubenswrapper[4730]: I0930 09:49:52.426319 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:52 crc kubenswrapper[4730]: I0930 09:49:52.426397 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:52 crc kubenswrapper[4730]: I0930 09:49:52.426418 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:52 crc kubenswrapper[4730]: I0930 09:49:52.426450 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:52 crc kubenswrapper[4730]: I0930 09:49:52.426470 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:52Z","lastTransitionTime":"2025-09-30T09:49:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:49:52 crc kubenswrapper[4730]: I0930 09:49:52.530729 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:52 crc kubenswrapper[4730]: I0930 09:49:52.530803 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:52 crc kubenswrapper[4730]: I0930 09:49:52.530822 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:52 crc kubenswrapper[4730]: I0930 09:49:52.530850 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:52 crc kubenswrapper[4730]: I0930 09:49:52.530877 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:52Z","lastTransitionTime":"2025-09-30T09:49:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:49:52 crc kubenswrapper[4730]: I0930 09:49:52.634460 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:52 crc kubenswrapper[4730]: I0930 09:49:52.634562 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:52 crc kubenswrapper[4730]: I0930 09:49:52.634588 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:52 crc kubenswrapper[4730]: I0930 09:49:52.634665 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:52 crc kubenswrapper[4730]: I0930 09:49:52.634691 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:52Z","lastTransitionTime":"2025-09-30T09:49:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:49:52 crc kubenswrapper[4730]: I0930 09:49:52.723475 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:52 crc kubenswrapper[4730]: I0930 09:49:52.723520 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:52 crc kubenswrapper[4730]: I0930 09:49:52.723530 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:52 crc kubenswrapper[4730]: I0930 09:49:52.723546 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:52 crc kubenswrapper[4730]: I0930 09:49:52.723558 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:52Z","lastTransitionTime":"2025-09-30T09:49:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:49:52 crc kubenswrapper[4730]: E0930 09:49:52.739634 4730 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:49:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:49:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:52Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:49:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:49:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:52Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"106859cf-ef10-430a-91cd-145c67df2de1\\\",\\\"systemUUID\\\":\\\"07b44d08-082f-49ea-b265-a8fb7a484875\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:52Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:52 crc kubenswrapper[4730]: I0930 09:49:52.745313 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:52 crc kubenswrapper[4730]: I0930 09:49:52.745465 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 09:49:52 crc kubenswrapper[4730]: I0930 09:49:52.745487 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:52 crc kubenswrapper[4730]: I0930 09:49:52.745558 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:52 crc kubenswrapper[4730]: I0930 09:49:52.745578 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:52Z","lastTransitionTime":"2025-09-30T09:49:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:49:52 crc kubenswrapper[4730]: E0930 09:49:52.761822 4730 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:49:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:49:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:52Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:49:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:49:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:52Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[ ...image list identical to the first status patch above, elided... ],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"106859cf-ef10-430a-91cd-145c67df2de1\\\",\\\"systemUUID\\\":\\\"07b44d08-082f-49ea-b265-a8fb7a484875\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:52Z is after 2025-08-24T17:21:41Z"
Sep 30 09:49:52 crc kubenswrapper[4730]: I0930 09:49:52.767075 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:52 crc kubenswrapper[4730]: I0930 09:49:52.767142 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 09:49:52 crc kubenswrapper[4730]: I0930 09:49:52.767160 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:52 crc kubenswrapper[4730]: I0930 09:49:52.767186 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:52 crc kubenswrapper[4730]: I0930 09:49:52.767202 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:52Z","lastTransitionTime":"2025-09-30T09:49:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:49:52 crc kubenswrapper[4730]: E0930 09:49:52.782353 4730 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:49:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:49:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:52Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:49:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:49:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:52Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[ ...image list identical to the first status patch above, elided... ],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"106859cf-ef10-430a-91cd-145c67df2de1\\\",\\\"systemUUID\\\":\\\"07b44d08-082f-49ea-b265-a8fb7a484875\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:52Z is after 2025-08-24T17:21:41Z"
Sep 30 09:49:52 crc kubenswrapper[4730]: I0930 09:49:52.788433 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:52 crc kubenswrapper[4730]: I0930 09:49:52.789579 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 09:49:52 crc kubenswrapper[4730]: I0930 09:49:52.789647 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:52 crc kubenswrapper[4730]: I0930 09:49:52.789717 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:52 crc kubenswrapper[4730]: I0930 09:49:52.789742 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:52Z","lastTransitionTime":"2025-09-30T09:49:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:49:52 crc kubenswrapper[4730]: E0930 09:49:52.810734 4730 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:49:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:49:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:52Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:49:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:49:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:52Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[ ...image list identical to the first status patch above, elided... ],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"106859cf-ef10-430a-91cd-145c67df2de1\\\",\\\"systemUUID\\\":\\\"07b44d08-082f-49ea-b265-a8fb7a484875\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:52Z is after 2025-08-24T17:21:41Z"
Sep 30 09:49:52 crc kubenswrapper[4730]: I0930 09:49:52.815714 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:52 crc kubenswrapper[4730]: I0930 09:49:52.815770 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 09:49:52 crc kubenswrapper[4730]: I0930 09:49:52.815782 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:52 crc kubenswrapper[4730]: I0930 09:49:52.815802 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:52 crc kubenswrapper[4730]: I0930 09:49:52.815816 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:52Z","lastTransitionTime":"2025-09-30T09:49:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:49:52 crc kubenswrapper[4730]: E0930 09:49:52.830004 4730 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:49:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:49:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:52Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:49:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:49:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:52Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[ ...image list identical to the first status patch above, elided... ],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"106859cf-ef10-430a-91cd-145c67df2de1\\\",\\\"systemUUID\\\":\\\"07b44d08-082f-49ea-b265-a8fb7a484875\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:52Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:52 crc kubenswrapper[4730]: E0930 09:49:52.830214 4730 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count"
Sep 30 09:49:52 crc kubenswrapper[4730]: I0930 09:49:52.832280 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Sep 30 09:49:52 crc kubenswrapper[4730]: I0930 09:49:52.832317 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:52 crc kubenswrapper[4730]: I0930 09:49:52.832332 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:52 crc kubenswrapper[4730]: I0930 09:49:52.832353 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:52 crc kubenswrapper[4730]: I0930 09:49:52.832368 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:52Z","lastTransitionTime":"2025-09-30T09:49:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:49:52 crc kubenswrapper[4730]: I0930 09:49:52.936528 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:52 crc kubenswrapper[4730]: I0930 09:49:52.936597 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:52 crc kubenswrapper[4730]: I0930 09:49:52.936652 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:52 crc kubenswrapper[4730]: I0930 09:49:52.936677 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:52 crc kubenswrapper[4730]: I0930 09:49:52.936695 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:52Z","lastTransitionTime":"2025-09-30T09:49:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:49:53 crc kubenswrapper[4730]: I0930 09:49:53.040751 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:53 crc kubenswrapper[4730]: I0930 09:49:53.040807 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:53 crc kubenswrapper[4730]: I0930 09:49:53.040823 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:53 crc kubenswrapper[4730]: I0930 09:49:53.040846 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:53 crc kubenswrapper[4730]: I0930 09:49:53.040860 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:53Z","lastTransitionTime":"2025-09-30T09:49:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:49:53 crc kubenswrapper[4730]: I0930 09:49:53.144717 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:53 crc kubenswrapper[4730]: I0930 09:49:53.144786 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:53 crc kubenswrapper[4730]: I0930 09:49:53.144802 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:53 crc kubenswrapper[4730]: I0930 09:49:53.144829 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:53 crc kubenswrapper[4730]: I0930 09:49:53.144844 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:53Z","lastTransitionTime":"2025-09-30T09:49:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:49:53 crc kubenswrapper[4730]: I0930 09:49:53.193794 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/be86a67e-c663-4551-9ecf-a8c2a9801cd7-metrics-certs\") pod \"network-metrics-daemon-dqqrb\" (UID: \"be86a67e-c663-4551-9ecf-a8c2a9801cd7\") " pod="openshift-multus/network-metrics-daemon-dqqrb" Sep 30 09:49:53 crc kubenswrapper[4730]: E0930 09:49:53.194071 4730 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Sep 30 09:49:53 crc kubenswrapper[4730]: E0930 09:49:53.194186 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/be86a67e-c663-4551-9ecf-a8c2a9801cd7-metrics-certs podName:be86a67e-c663-4551-9ecf-a8c2a9801cd7 nodeName:}" failed. No retries permitted until 2025-09-30 09:49:55.194158674 +0000 UTC m=+39.527418677 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/be86a67e-c663-4551-9ecf-a8c2a9801cd7-metrics-certs") pod "network-metrics-daemon-dqqrb" (UID: "be86a67e-c663-4551-9ecf-a8c2a9801cd7") : object "openshift-multus"/"metrics-daemon-secret" not registered Sep 30 09:49:53 crc kubenswrapper[4730]: I0930 09:49:53.248688 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:53 crc kubenswrapper[4730]: I0930 09:49:53.248734 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:53 crc kubenswrapper[4730]: I0930 09:49:53.248745 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:53 crc kubenswrapper[4730]: I0930 09:49:53.248767 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:53 crc kubenswrapper[4730]: I0930 09:49:53.248780 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:53Z","lastTransitionTime":"2025-09-30T09:49:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:49:53 crc kubenswrapper[4730]: I0930 09:49:53.351745 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:53 crc kubenswrapper[4730]: I0930 09:49:53.351796 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:53 crc kubenswrapper[4730]: I0930 09:49:53.351807 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:53 crc kubenswrapper[4730]: I0930 09:49:53.351826 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:53 crc kubenswrapper[4730]: I0930 09:49:53.351836 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:53Z","lastTransitionTime":"2025-09-30T09:49:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:49:53 crc kubenswrapper[4730]: I0930 09:49:53.379965 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-dqqrb" Sep 30 09:49:53 crc kubenswrapper[4730]: E0930 09:49:53.380262 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-dqqrb" podUID="be86a67e-c663-4551-9ecf-a8c2a9801cd7" Sep 30 09:49:53 crc kubenswrapper[4730]: I0930 09:49:53.455789 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:53 crc kubenswrapper[4730]: I0930 09:49:53.455875 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:53 crc kubenswrapper[4730]: I0930 09:49:53.455904 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:53 crc kubenswrapper[4730]: I0930 09:49:53.455938 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:53 crc kubenswrapper[4730]: I0930 09:49:53.455962 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:53Z","lastTransitionTime":"2025-09-30T09:49:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:49:53 crc kubenswrapper[4730]: I0930 09:49:53.560370 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:53 crc kubenswrapper[4730]: I0930 09:49:53.560443 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:53 crc kubenswrapper[4730]: I0930 09:49:53.560459 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:53 crc kubenswrapper[4730]: I0930 09:49:53.560481 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:53 crc kubenswrapper[4730]: I0930 09:49:53.560498 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:53Z","lastTransitionTime":"2025-09-30T09:49:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:49:53 crc kubenswrapper[4730]: I0930 09:49:53.664036 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:53 crc kubenswrapper[4730]: I0930 09:49:53.664127 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:53 crc kubenswrapper[4730]: I0930 09:49:53.664147 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:53 crc kubenswrapper[4730]: I0930 09:49:53.664177 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:53 crc kubenswrapper[4730]: I0930 09:49:53.664199 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:53Z","lastTransitionTime":"2025-09-30T09:49:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:49:53 crc kubenswrapper[4730]: I0930 09:49:53.768183 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:53 crc kubenswrapper[4730]: I0930 09:49:53.768243 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:53 crc kubenswrapper[4730]: I0930 09:49:53.768255 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:53 crc kubenswrapper[4730]: I0930 09:49:53.768275 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:53 crc kubenswrapper[4730]: I0930 09:49:53.768289 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:53Z","lastTransitionTime":"2025-09-30T09:49:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:49:53 crc kubenswrapper[4730]: I0930 09:49:53.871708 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:53 crc kubenswrapper[4730]: I0930 09:49:53.871770 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:53 crc kubenswrapper[4730]: I0930 09:49:53.871792 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:53 crc kubenswrapper[4730]: I0930 09:49:53.871816 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:53 crc kubenswrapper[4730]: I0930 09:49:53.871831 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:53Z","lastTransitionTime":"2025-09-30T09:49:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:49:53 crc kubenswrapper[4730]: I0930 09:49:53.974681 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:53 crc kubenswrapper[4730]: I0930 09:49:53.974752 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:53 crc kubenswrapper[4730]: I0930 09:49:53.974770 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:53 crc kubenswrapper[4730]: I0930 09:49:53.974787 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:53 crc kubenswrapper[4730]: I0930 09:49:53.974798 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:53Z","lastTransitionTime":"2025-09-30T09:49:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:49:54 crc kubenswrapper[4730]: I0930 09:49:54.024479 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" Sep 30 09:49:54 crc kubenswrapper[4730]: I0930 09:49:54.025487 4730 scope.go:117] "RemoveContainer" containerID="1ad89aefc193452c8800d8680d37a4b800155f7d26f323088ad7b7e636adaa88" Sep 30 09:49:54 crc kubenswrapper[4730]: E0930 09:49:54.025686 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-c5vmh_openshift-ovn-kubernetes(823c4c28-801d-421e-b15f-02a17e300753)\"" pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" podUID="823c4c28-801d-421e-b15f-02a17e300753" Sep 30 09:49:54 crc kubenswrapper[4730]: I0930 09:49:54.043769 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-nw55k" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c0a99445-b60e-4c47-b4ae-00b9983d8a15\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b566e571569572866ddcfc76684ec3f1fdabc0f890aa5fd8588d136b890e88ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qdpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:40Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nw55k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:54Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:54 crc kubenswrapper[4730]: I0930 09:49:54.059928 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wh45q" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c534fd9f-3767-4be6-a84e-45260fe2042f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c1daa82f1b7e2909d26e662848abe7adaa29361f528ea2efac340bfd685bb155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n2mj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6b629ad4121cddae85f73b40e36013ebfd8d41e9f990dba444bdd2e51d8f9d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n2mj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:50Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wh45q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:54Z is after 2025-08-24T17:21:41Z" Sep 30 
09:49:54 crc kubenswrapper[4730]: I0930 09:49:54.078033 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:54 crc kubenswrapper[4730]: I0930 09:49:54.078084 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:54 crc kubenswrapper[4730]: I0930 09:49:54.078103 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:54 crc kubenswrapper[4730]: I0930 09:49:54.078126 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:54 crc kubenswrapper[4730]: I0930 09:49:54.078142 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:54Z","lastTransitionTime":"2025-09-30T09:49:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:49:54 crc kubenswrapper[4730]: I0930 09:49:54.080670 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"14260296-7de2-4c79-8afc-9472b7132a27\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac2d1a0a531df208cd612ed7da358209e05fb5a21f658ca2d1ab62720db094e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9d7ccc34d02e190d685a52bfa797834b585dfe919856b9af92419fa4e6e3c8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d
7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e42db724f492a056ad52bf50f43c9603664c4307a8f0c01d2d83d36f644c134c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5305f42ae8fc9005ec4726813a881f7b86ab167dd060238bbde4c9af5eb2c010\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://009056b5d735b1cd232be92cb1bfae18ea1e4e6f5b06185d424bba38ef57ade7\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T09:49:30Z\\\",\\\"message\\\":\\\"W0930 09:49:19.606005 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0930 09:49:19.606415 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759225759 cert, and key in /tmp/serving-cert-1864340106/serving-signer.crt, /tmp/serving-cert-1864340106/serving-signer.key\\\\nI0930 09:49:19.940795 1 observer_polling.go:159] Starting file observer\\\\nW0930 09:49:19.943973 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0930 09:49:19.944252 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 09:49:19.946386 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1864340106/tls.crt::/tmp/serving-cert-1864340106/tls.key\\\\\\\"\\\\nF0930 09:49:30.331930 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2da24b18038fc740ead0fe99d18f91c87014548941763cf0ef15284ed5eb228\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://674fc8de60d8bca93dc0d54d5e12664780d58b8b4c5eaf314e923c2f0644e11b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://674fc8de60d8bca93dc0d54d5e12664780d58b8b4c5eaf314e923c2f0644e11b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:16Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:54Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:54 crc kubenswrapper[4730]: I0930 09:49:54.098599 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"17c9fe87-51d8-4e94-b0aa-10f3112788c6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a687cd54db44118582a4e1b57fc64241860ae1cfdc202ffd5dccc517967e21f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://95fa75e3d43d0b4c4f3ddcb59fc3bd9fb3a75647e5fedd92463bdfaf6c3db722\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://758b57b21f203bcb20f4e3a3b20c6de8696ba56b4e0eff6a31faaf6879a3e7b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba42c83f0075e3d0f10f24307dd574c42a505351503a8794bbcee9e986ec80e3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:16Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:54Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:54 crc kubenswrapper[4730]: I0930 09:49:54.117647 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1917c40c0ff4b4b6a2389c465a4887bad0db0b5b86a8715e0c790b3e0acfb973\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:54Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:54 crc kubenswrapper[4730]: I0930 09:49:54.134474 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e32477c6ee071d9e994825da575711cc75f177c998dd5fd170be07551b40e049\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:54Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:54 crc kubenswrapper[4730]: I0930 09:49:54.148025 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"95bd4436-8399-478d-9552-c9ba5ae8f327\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b7761b460113a85528a11a7475d5245ce3009a0dd413a8deec822c6c0eac07b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9d975\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://defac85c6bf34a3ea262b1ad293516b72b27c3f3e328f3f970694bb978bf90a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9d975\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d4zf9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:54Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:54 crc kubenswrapper[4730]: I0930 09:49:54.159178 4730 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-dns/node-resolver-s64nf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a7e6b85-ac68-4da9-b7eb-b5a936f639df\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a77da47c8d44b3aae7f4e27fcc4df190549c2d881e0969dbe3b8951103477ba3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6rknv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s64nf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:54Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:54 crc kubenswrapper[4730]: I0930 09:49:54.170789 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-dqqrb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be86a67e-c663-4551-9ecf-a8c2a9801cd7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9sm8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9sm8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:51Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-dqqrb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:54Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:54 crc kubenswrapper[4730]: I0930 09:49:54.181808 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:54 crc kubenswrapper[4730]: I0930 09:49:54.181854 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:54 crc kubenswrapper[4730]: I0930 09:49:54.181867 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:54 crc kubenswrapper[4730]: I0930 09:49:54.181884 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:54 crc kubenswrapper[4730]: I0930 09:49:54.181895 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:54Z","lastTransitionTime":"2025-09-30T09:49:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:49:54 crc kubenswrapper[4730]: I0930 09:49:54.193287 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:54Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:54 crc kubenswrapper[4730]: I0930 09:49:54.205294 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:54Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:54 crc kubenswrapper[4730]: I0930 09:49:54.219223 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-t2frc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"98a6f8df-1ac8-4652-8074-90cb180311ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca62256374b9da6cb7b82db90ee6d121b072440c000bc7dfb04e763224cf666d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"syste
m-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-txcfr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-t2frc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:54Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:54 crc kubenswrapper[4730]: I0930 09:49:54.240027 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"823c4c28-801d-421e-b15f-02a17e300753\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://566f950ac85f82874c332e01386898784e3b3e37ee3c422f8b73f6e732cd4541\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f988662b16857e74e7c88eb9e69279e7b5807aa2831cdd6ed9a7cab0a6b2494\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d99f5927ded358669fa88d47fb6a8d8bad2808bfdd07920056227828e478eda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fbe6a18bf72a902960dfc551de099c3956696ca5ac78ff5afc30a93768be323\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd7abb6daba8cf38e3d8cc62ab526acb2fe714e07477eb5d1ecf77e145cf60dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d525f8baccabf7bc3b8067dbbc59d76851878ccc48a9fbe61c637b6fe57e01d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ad89aefc193452c8800d8680d37a4b800155f7d
26f323088ad7b7e636adaa88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ad89aefc193452c8800d8680d37a4b800155f7d26f323088ad7b7e636adaa88\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T09:49:50Z\\\",\\\"message\\\":\\\"55gtf\\\\nI0930 09:49:49.593780 6142 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI0930 09:49:49.593778 6142 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Switch Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:61897e97-c771-4738-8709-09636387cb00}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF0930 09:49:49.593434 6142 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:49Z is after \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:48Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-c5vmh_openshift-ovn-kubernetes(823c4c28-801d-421e-b15f-02a17e300753)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59538b49ec4037e4bd1ccb035a0263f32db2504037aca2ab484dc9323f80b5d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4fcd3f6fd645e486e741ae4ddea7a8669c849d729860c651131da25638a393f\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4fcd3f6fd645e486e741ae4ddea7a8669c849d729860c651131da25638a393f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-c5vmh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:54Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:54 crc kubenswrapper[4730]: I0930 09:49:54.258845 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:54Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:54 crc kubenswrapper[4730]: I0930 09:49:54.273426 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://96ab57c0ead54e1fe3bbf9a7fe3fc8375d1e5ebd29840f01bb2022de2f3bfdc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://373aa4d8084c958728b4a3f16056068fb328a59e55a5f50785647d78fac0e424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:54Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:54 crc kubenswrapper[4730]: I0930 09:49:54.286105 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:54 crc kubenswrapper[4730]: I0930 09:49:54.286168 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:54 crc kubenswrapper[4730]: I0930 09:49:54.286185 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:54 crc kubenswrapper[4730]: I0930 09:49:54.286211 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:54 crc kubenswrapper[4730]: I0930 09:49:54.286228 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:54Z","lastTransitionTime":"2025-09-30T09:49:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:49:54 crc kubenswrapper[4730]: I0930 09:49:54.290798 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-p4xvk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9871bed2-69f9-44f1-ab80-f8b4b9241e73\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19b882d14f4eaee6c6ad8cdd30c6e1e63eb837c7bc2dd31974c37ffbc3307186\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84aa64a838e41f5db6010538528a9d8d9221b5fe859e1afec9d9d3c71ef90eb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://84aa64a838e41f5db6010538528a9d8d9221b5fe859e1afec9d9d3c71ef90eb7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://618950d5ff8ec1ef34dc0f43e73a265aefbeb97f69a25226fb2007b4d28249a7\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://618950d5ff8ec1ef34dc0f43e73a265aefbeb97f69a25226fb2007b4d28249a7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://849eb24b3a7e62a7667c303069953f1391cdebf8f2888dc7d3967667c31c2a3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://849eb24b3a7e62a7667c303069953f1391cdebf8f2888dc7d3967667c31c2a3e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://685162f0c76d93c0aa78dc2c5c0ee794627f665afa60af9d9d0ae3ba8f5f7b59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://685162f0c76d93c0aa78dc2c5c0ee794627f665afa60af9d9d0ae3ba8f5f7b59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c92fbb212b1ad8be488e639f7f88263b3d96ab0320da532d908b3c9df59ef2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c92fbb212b1ad8be488e639f7f88263b3d96ab0320da532d908b3c9df59ef2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://840f2a769ccdfba8933c685d07c325cf84d38bccf1d317c3b664f298aa52d878\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://840f2a769ccdfba8933c685d07c325cf84d38bccf1d317c3b664f298aa52d878\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-p4xvk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:54Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:54 crc kubenswrapper[4730]: I0930 09:49:54.380678 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 09:49:54 crc kubenswrapper[4730]: I0930 09:49:54.380721 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 09:49:54 crc kubenswrapper[4730]: I0930 09:49:54.380819 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 09:49:54 crc kubenswrapper[4730]: E0930 09:49:54.380948 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 09:49:54 crc kubenswrapper[4730]: E0930 09:49:54.381127 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 09:49:54 crc kubenswrapper[4730]: E0930 09:49:54.381535 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 09:49:54 crc kubenswrapper[4730]: I0930 09:49:54.388561 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:54 crc kubenswrapper[4730]: I0930 09:49:54.388630 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:54 crc kubenswrapper[4730]: I0930 09:49:54.388648 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:54 crc kubenswrapper[4730]: I0930 09:49:54.388670 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:54 crc kubenswrapper[4730]: I0930 09:49:54.388682 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:54Z","lastTransitionTime":"2025-09-30T09:49:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:49:54 crc kubenswrapper[4730]: I0930 09:49:54.492008 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:54 crc kubenswrapper[4730]: I0930 09:49:54.492067 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:54 crc kubenswrapper[4730]: I0930 09:49:54.492079 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:54 crc kubenswrapper[4730]: I0930 09:49:54.492098 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:54 crc kubenswrapper[4730]: I0930 09:49:54.492111 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:54Z","lastTransitionTime":"2025-09-30T09:49:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:49:54 crc kubenswrapper[4730]: I0930 09:49:54.595138 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:54 crc kubenswrapper[4730]: I0930 09:49:54.595725 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:54 crc kubenswrapper[4730]: I0930 09:49:54.595740 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:54 crc kubenswrapper[4730]: I0930 09:49:54.595759 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:54 crc kubenswrapper[4730]: I0930 09:49:54.595772 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:54Z","lastTransitionTime":"2025-09-30T09:49:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:49:54 crc kubenswrapper[4730]: I0930 09:49:54.698260 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:54 crc kubenswrapper[4730]: I0930 09:49:54.698301 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:54 crc kubenswrapper[4730]: I0930 09:49:54.698313 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:54 crc kubenswrapper[4730]: I0930 09:49:54.698329 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:54 crc kubenswrapper[4730]: I0930 09:49:54.698339 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:54Z","lastTransitionTime":"2025-09-30T09:49:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:49:54 crc kubenswrapper[4730]: I0930 09:49:54.801378 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:54 crc kubenswrapper[4730]: I0930 09:49:54.801448 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:54 crc kubenswrapper[4730]: I0930 09:49:54.801465 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:54 crc kubenswrapper[4730]: I0930 09:49:54.801491 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:54 crc kubenswrapper[4730]: I0930 09:49:54.801511 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:54Z","lastTransitionTime":"2025-09-30T09:49:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:49:54 crc kubenswrapper[4730]: I0930 09:49:54.904260 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:54 crc kubenswrapper[4730]: I0930 09:49:54.904320 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:54 crc kubenswrapper[4730]: I0930 09:49:54.904331 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:54 crc kubenswrapper[4730]: I0930 09:49:54.904352 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:54 crc kubenswrapper[4730]: I0930 09:49:54.904362 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:54Z","lastTransitionTime":"2025-09-30T09:49:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:49:55 crc kubenswrapper[4730]: I0930 09:49:55.007643 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:55 crc kubenswrapper[4730]: I0930 09:49:55.007701 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:55 crc kubenswrapper[4730]: I0930 09:49:55.007711 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:55 crc kubenswrapper[4730]: I0930 09:49:55.007731 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:55 crc kubenswrapper[4730]: I0930 09:49:55.007742 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:55Z","lastTransitionTime":"2025-09-30T09:49:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:49:55 crc kubenswrapper[4730]: I0930 09:49:55.111205 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:55 crc kubenswrapper[4730]: I0930 09:49:55.111265 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:55 crc kubenswrapper[4730]: I0930 09:49:55.111279 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:55 crc kubenswrapper[4730]: I0930 09:49:55.111297 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:55 crc kubenswrapper[4730]: I0930 09:49:55.111309 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:55Z","lastTransitionTime":"2025-09-30T09:49:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:49:55 crc kubenswrapper[4730]: I0930 09:49:55.213954 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:55 crc kubenswrapper[4730]: I0930 09:49:55.213997 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:55 crc kubenswrapper[4730]: I0930 09:49:55.214007 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:55 crc kubenswrapper[4730]: I0930 09:49:55.214023 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:55 crc kubenswrapper[4730]: I0930 09:49:55.214057 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:55Z","lastTransitionTime":"2025-09-30T09:49:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:49:55 crc kubenswrapper[4730]: I0930 09:49:55.217226 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/be86a67e-c663-4551-9ecf-a8c2a9801cd7-metrics-certs\") pod \"network-metrics-daemon-dqqrb\" (UID: \"be86a67e-c663-4551-9ecf-a8c2a9801cd7\") " pod="openshift-multus/network-metrics-daemon-dqqrb" Sep 30 09:49:55 crc kubenswrapper[4730]: E0930 09:49:55.217479 4730 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Sep 30 09:49:55 crc kubenswrapper[4730]: E0930 09:49:55.217624 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/be86a67e-c663-4551-9ecf-a8c2a9801cd7-metrics-certs podName:be86a67e-c663-4551-9ecf-a8c2a9801cd7 nodeName:}" failed. No retries permitted until 2025-09-30 09:49:59.217580301 +0000 UTC m=+43.550840494 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/be86a67e-c663-4551-9ecf-a8c2a9801cd7-metrics-certs") pod "network-metrics-daemon-dqqrb" (UID: "be86a67e-c663-4551-9ecf-a8c2a9801cd7") : object "openshift-multus"/"metrics-daemon-secret" not registered Sep 30 09:49:55 crc kubenswrapper[4730]: I0930 09:49:55.317382 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:55 crc kubenswrapper[4730]: I0930 09:49:55.317438 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:55 crc kubenswrapper[4730]: I0930 09:49:55.317482 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:55 crc kubenswrapper[4730]: I0930 09:49:55.317503 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:55 crc kubenswrapper[4730]: I0930 09:49:55.317521 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:55Z","lastTransitionTime":"2025-09-30T09:49:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:49:55 crc kubenswrapper[4730]: I0930 09:49:55.380674 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-dqqrb" Sep 30 09:49:55 crc kubenswrapper[4730]: E0930 09:49:55.380873 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-dqqrb" podUID="be86a67e-c663-4551-9ecf-a8c2a9801cd7" Sep 30 09:49:55 crc kubenswrapper[4730]: I0930 09:49:55.421310 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:55 crc kubenswrapper[4730]: I0930 09:49:55.421361 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:55 crc kubenswrapper[4730]: I0930 09:49:55.421372 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:55 crc kubenswrapper[4730]: I0930 09:49:55.421392 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:55 crc kubenswrapper[4730]: I0930 09:49:55.421405 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:55Z","lastTransitionTime":"2025-09-30T09:49:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:49:55 crc kubenswrapper[4730]: I0930 09:49:55.524976 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:55 crc kubenswrapper[4730]: I0930 09:49:55.525063 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:55 crc kubenswrapper[4730]: I0930 09:49:55.525083 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:55 crc kubenswrapper[4730]: I0930 09:49:55.525113 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:55 crc kubenswrapper[4730]: I0930 09:49:55.525145 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:55Z","lastTransitionTime":"2025-09-30T09:49:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:49:55 crc kubenswrapper[4730]: I0930 09:49:55.628603 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:55 crc kubenswrapper[4730]: I0930 09:49:55.628696 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:55 crc kubenswrapper[4730]: I0930 09:49:55.628710 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:55 crc kubenswrapper[4730]: I0930 09:49:55.628732 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:55 crc kubenswrapper[4730]: I0930 09:49:55.628750 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:55Z","lastTransitionTime":"2025-09-30T09:49:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:49:55 crc kubenswrapper[4730]: I0930 09:49:55.732015 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:55 crc kubenswrapper[4730]: I0930 09:49:55.732060 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:55 crc kubenswrapper[4730]: I0930 09:49:55.732069 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:55 crc kubenswrapper[4730]: I0930 09:49:55.732085 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:55 crc kubenswrapper[4730]: I0930 09:49:55.732098 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:55Z","lastTransitionTime":"2025-09-30T09:49:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:49:55 crc kubenswrapper[4730]: I0930 09:49:55.836043 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:55 crc kubenswrapper[4730]: I0930 09:49:55.836108 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:55 crc kubenswrapper[4730]: I0930 09:49:55.836123 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:55 crc kubenswrapper[4730]: I0930 09:49:55.836142 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:55 crc kubenswrapper[4730]: I0930 09:49:55.836159 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:55Z","lastTransitionTime":"2025-09-30T09:49:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:49:55 crc kubenswrapper[4730]: I0930 09:49:55.939431 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:55 crc kubenswrapper[4730]: I0930 09:49:55.939485 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:55 crc kubenswrapper[4730]: I0930 09:49:55.939500 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:55 crc kubenswrapper[4730]: I0930 09:49:55.939525 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:55 crc kubenswrapper[4730]: I0930 09:49:55.939540 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:55Z","lastTransitionTime":"2025-09-30T09:49:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:49:56 crc kubenswrapper[4730]: I0930 09:49:56.043180 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:56 crc kubenswrapper[4730]: I0930 09:49:56.043246 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:56 crc kubenswrapper[4730]: I0930 09:49:56.043265 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:56 crc kubenswrapper[4730]: I0930 09:49:56.043293 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:56 crc kubenswrapper[4730]: I0930 09:49:56.043312 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:56Z","lastTransitionTime":"2025-09-30T09:49:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:49:56 crc kubenswrapper[4730]: I0930 09:49:56.146554 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:56 crc kubenswrapper[4730]: I0930 09:49:56.146636 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:56 crc kubenswrapper[4730]: I0930 09:49:56.146647 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:56 crc kubenswrapper[4730]: I0930 09:49:56.146668 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:56 crc kubenswrapper[4730]: I0930 09:49:56.146682 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:56Z","lastTransitionTime":"2025-09-30T09:49:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:49:56 crc kubenswrapper[4730]: I0930 09:49:56.248663 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:56 crc kubenswrapper[4730]: I0930 09:49:56.248744 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:56 crc kubenswrapper[4730]: I0930 09:49:56.248758 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:56 crc kubenswrapper[4730]: I0930 09:49:56.248774 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:56 crc kubenswrapper[4730]: I0930 09:49:56.248785 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:56Z","lastTransitionTime":"2025-09-30T09:49:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:49:56 crc kubenswrapper[4730]: I0930 09:49:56.351858 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:56 crc kubenswrapper[4730]: I0930 09:49:56.351911 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:56 crc kubenswrapper[4730]: I0930 09:49:56.351920 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:56 crc kubenswrapper[4730]: I0930 09:49:56.351947 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:56 crc kubenswrapper[4730]: I0930 09:49:56.351958 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:56Z","lastTransitionTime":"2025-09-30T09:49:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:49:56 crc kubenswrapper[4730]: I0930 09:49:56.380988 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 09:49:56 crc kubenswrapper[4730]: I0930 09:49:56.381080 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 09:49:56 crc kubenswrapper[4730]: I0930 09:49:56.381181 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 09:49:56 crc kubenswrapper[4730]: E0930 09:49:56.381194 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 09:49:56 crc kubenswrapper[4730]: E0930 09:49:56.381318 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 09:49:56 crc kubenswrapper[4730]: E0930 09:49:56.381432 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 09:49:56 crc kubenswrapper[4730]: I0930 09:49:56.402552 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e32477c6ee071d9e994825da575711cc75f177c998dd5fd170be07551b40e049\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:56Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:56 crc kubenswrapper[4730]: I0930 09:49:56.416842 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"95bd4436-8399-478d-9552-c9ba5ae8f327\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b7761b460113a85528a11a7475d5245ce3009a0dd413a8deec822c6c0eac07b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9d975\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://defac85c6bf34a3ea262b1ad293516b72b27c3f3e328f3f970694bb978bf90a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9d975\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d4zf9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:56Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:56 crc kubenswrapper[4730]: I0930 09:49:56.430458 4730 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-nw55k" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c0a99445-b60e-4c47-b4ae-00b9983d8a15\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b566e571569572866ddcfc76684ec3f1fdabc0f890aa5fd8588d136b890e88ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qdpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:40Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nw55k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:56Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:56 crc kubenswrapper[4730]: I0930 09:49:56.446794 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wh45q" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c534fd9f-3767-4be6-a84e-45260fe2042f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c1daa82f1b7e2909d26e662848abe7adaa29361f528ea2efac340bfd685bb155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n2mj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6b629ad4121cddae85f73b40e36013ebfd8d41e9f990dba444bdd2e51d8f9d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n2mj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:50Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wh45q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:56Z is after 2025-08-24T17:21:41Z" Sep 30 
09:49:56 crc kubenswrapper[4730]: I0930 09:49:56.454533 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:56 crc kubenswrapper[4730]: I0930 09:49:56.454726 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:56 crc kubenswrapper[4730]: I0930 09:49:56.454788 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:56 crc kubenswrapper[4730]: I0930 09:49:56.454854 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:56 crc kubenswrapper[4730]: I0930 09:49:56.454939 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:56Z","lastTransitionTime":"2025-09-30T09:49:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:49:56 crc kubenswrapper[4730]: I0930 09:49:56.473810 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"14260296-7de2-4c79-8afc-9472b7132a27\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac2d1a0a531df208cd612ed7da358209e05fb5a21f658ca2d1ab62720db094e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9d7ccc34d02e190d685a52bfa797834b585dfe919856b9af92419fa4e6e3c8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d
7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e42db724f492a056ad52bf50f43c9603664c4307a8f0c01d2d83d36f644c134c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5305f42ae8fc9005ec4726813a881f7b86ab167dd060238bbde4c9af5eb2c010\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://009056b5d735b1cd232be92cb1bfae18ea1e4e6f5b06185d424bba38ef57ade7\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T09:49:30Z\\\",\\\"message\\\":\\\"W0930 09:49:19.606005 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0930 09:49:19.606415 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759225759 cert, and key in /tmp/serving-cert-1864340106/serving-signer.crt, /tmp/serving-cert-1864340106/serving-signer.key\\\\nI0930 09:49:19.940795 1 observer_polling.go:159] Starting file observer\\\\nW0930 09:49:19.943973 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0930 09:49:19.944252 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 09:49:19.946386 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1864340106/tls.crt::/tmp/serving-cert-1864340106/tls.key\\\\\\\"\\\\nF0930 09:49:30.331930 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2da24b18038fc740ead0fe99d18f91c87014548941763cf0ef15284ed5eb228\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://674fc8de60d8bca93dc0d54d5e12664780d58b8b4c5eaf314e923c2f0644e11b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://674fc8de60d8bca93dc0d54d5e12664780d58b8b4c5eaf314e923c2f0644e11b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:16Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:56Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:56 crc kubenswrapper[4730]: I0930 09:49:56.493726 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"17c9fe87-51d8-4e94-b0aa-10f3112788c6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a687cd54db44118582a4e1b57fc64241860ae1cfdc202ffd5dccc517967e21f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://95fa75e3d43d0b4c4f3ddcb59fc3bd9fb3a75647e5fedd92463bdfaf6c3db722\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://758b57b21f203bcb20f4e3a3b20c6de8696ba56b4e0eff6a31faaf6879a3e7b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba42c83f0075e3d0f10f24307dd574c42a505351503a8794bbcee9e986ec80e3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:16Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:56Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:56 crc kubenswrapper[4730]: I0930 09:49:56.508699 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1917c40c0ff4b4b6a2389c465a4887bad0db0b5b86a8715e0c790b3e0acfb973\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:56Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:56 crc kubenswrapper[4730]: I0930 09:49:56.520292 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s64nf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a7e6b85-ac68-4da9-b7eb-b5a936f639df\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a77da47c8d44b3aae7f4e27fcc4df190549c2d881e0969dbe3b8951103477ba3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6rknv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s64nf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:56Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:56 crc kubenswrapper[4730]: I0930 09:49:56.544325 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-t2frc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"98a6f8df-1ac8-4652-8074-90cb180311ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca62256374b9da6cb7b82db90ee6d121b072440c000bc7dfb04e763224cf666d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-txcfr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-t2frc\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:56Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:56 crc kubenswrapper[4730]: I0930 09:49:56.559402 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:56 crc kubenswrapper[4730]: I0930 09:49:56.559456 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:56 crc kubenswrapper[4730]: I0930 09:49:56.559468 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:56 crc kubenswrapper[4730]: I0930 09:49:56.559491 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:56 crc kubenswrapper[4730]: I0930 09:49:56.559502 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:56Z","lastTransitionTime":"2025-09-30T09:49:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:49:56 crc kubenswrapper[4730]: I0930 09:49:56.566078 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"823c4c28-801d-421e-b15f-02a17e300753\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://566f950ac85f82874c332e01386898784e3b3e37ee3c422f8b73f6e732cd4541\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f988662b16857e74e7c88eb9e69279e7b5807aa2831cdd6ed9a7cab0a6b2494\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d99f5927ded358669fa88d47fb6a8d8bad2808bfdd07920056227828e478eda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fbe6a18bf72a902960dfc551de099c3956696ca5ac78ff5afc30a93768be323\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd7abb6daba8cf38e3d8cc62ab526acb2fe714e07477eb5d1ecf77e145cf60dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d525f8baccabf7bc3b8067dbbc59d76851878ccc48a9fbe61c637b6fe57e01d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ad89aefc193452c8800d8680d37a4b800155f7d
26f323088ad7b7e636adaa88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ad89aefc193452c8800d8680d37a4b800155f7d26f323088ad7b7e636adaa88\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T09:49:50Z\\\",\\\"message\\\":\\\"55gtf\\\\nI0930 09:49:49.593780 6142 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI0930 09:49:49.593778 6142 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Switch Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:61897e97-c771-4738-8709-09636387cb00}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF0930 09:49:49.593434 6142 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:49Z is after \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:48Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-c5vmh_openshift-ovn-kubernetes(823c4c28-801d-421e-b15f-02a17e300753)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59538b49ec4037e4bd1ccb035a0263f32db2504037aca2ab484dc9323f80b5d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4fcd3f6fd645e486e741ae4ddea7a8669c849d729860c651131da25638a393f\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4fcd3f6fd645e486e741ae4ddea7a8669c849d729860c651131da25638a393f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-c5vmh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:56Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:56 crc kubenswrapper[4730]: I0930 09:49:56.578886 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-dqqrb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be86a67e-c663-4551-9ecf-a8c2a9801cd7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9sm8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9sm8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:51Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-dqqrb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:56Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:56 crc kubenswrapper[4730]: I0930 09:49:56.595454 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:56Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:56 crc kubenswrapper[4730]: I0930 09:49:56.610351 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:56Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:56 crc kubenswrapper[4730]: I0930 09:49:56.626224 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://96ab57c0ead54e1fe3bbf9a7fe3fc8375d1e5ebd29840f01bb2022de2f3bfdc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://373aa4d8084c958728b4a3f16056068fb328a59e55a5f50785647d78fac0e424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:56Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:56 crc kubenswrapper[4730]: I0930 09:49:56.642482 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-p4xvk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9871bed2-69f9-44f1-ab80-f8b4b9241e73\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19b882d14f4eaee6c6ad8cdd30c6e1e63eb837c7bc2dd31974c37ffbc3307186\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84aa64a838e41f5db6010538528a9d8d9221b5fe859e1afec9d9d3c71ef90eb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://84aa64a838e41f5db6010538528a9d8d9221b5fe859e1afec9d
9d3c71ef90eb7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://618950d5ff8ec1ef34dc0f43e73a265aefbeb97f69a25226fb2007b4d28249a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://618950d5ff8ec1ef34dc0f43e73a265aefbeb97f69a25226fb2007b4d28249a7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://849eb24b3a7e62a7667c303069953f1391cdebf8f2888dc7d3967667c31c2a3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://849eb24b3a7e62a7667c303069953f1391cdebf8f2888dc7d3967667c31c2a3e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://685162f0c76d93c0aa78dc2c5c0ee794627f665afa60af9d9d0ae3ba8f5f7b59\\\",\\\"image\\\":\\\"quay.io/ope
nshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://685162f0c76d93c0aa78dc2c5c0ee794627f665afa60af9d9d0ae3ba8f5f7b59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c92fbb212b1ad8be488e639f7f88263b3d96ab0320da532d908b3c9df59ef2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c92fbb212b1ad8be488e639f7f88263b3d96ab0320da532d908b3c9df59ef2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://840f2a769ccdfba8933c685d07c325cf84d38bccf1d317c3b664f298aa52d878\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://840f2a769ccdfba8933c685d07c325cf84d38bccf1d317c3b664f298aa52d878\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-p4xvk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:56Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:56 crc kubenswrapper[4730]: I0930 09:49:56.658074 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:56Z is after 2025-08-24T17:21:41Z" Sep 30 09:49:56 crc kubenswrapper[4730]: I0930 09:49:56.662310 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:56 crc kubenswrapper[4730]: I0930 09:49:56.662373 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:56 crc kubenswrapper[4730]: I0930 09:49:56.662385 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:56 crc kubenswrapper[4730]: I0930 09:49:56.662406 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:56 crc kubenswrapper[4730]: I0930 09:49:56.662419 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:56Z","lastTransitionTime":"2025-09-30T09:49:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:49:56 crc kubenswrapper[4730]: I0930 09:49:56.765655 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:56 crc kubenswrapper[4730]: I0930 09:49:56.765715 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:56 crc kubenswrapper[4730]: I0930 09:49:56.765733 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:56 crc kubenswrapper[4730]: I0930 09:49:56.765758 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:56 crc kubenswrapper[4730]: I0930 09:49:56.765776 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:56Z","lastTransitionTime":"2025-09-30T09:49:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:49:56 crc kubenswrapper[4730]: I0930 09:49:56.869231 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:56 crc kubenswrapper[4730]: I0930 09:49:56.869298 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:56 crc kubenswrapper[4730]: I0930 09:49:56.869315 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:56 crc kubenswrapper[4730]: I0930 09:49:56.869345 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:56 crc kubenswrapper[4730]: I0930 09:49:56.869395 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:56Z","lastTransitionTime":"2025-09-30T09:49:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:49:56 crc kubenswrapper[4730]: I0930 09:49:56.972283 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:56 crc kubenswrapper[4730]: I0930 09:49:56.972647 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:56 crc kubenswrapper[4730]: I0930 09:49:56.972716 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:56 crc kubenswrapper[4730]: I0930 09:49:56.972795 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:56 crc kubenswrapper[4730]: I0930 09:49:56.972853 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:56Z","lastTransitionTime":"2025-09-30T09:49:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:49:57 crc kubenswrapper[4730]: I0930 09:49:57.075283 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:57 crc kubenswrapper[4730]: I0930 09:49:57.075560 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:57 crc kubenswrapper[4730]: I0930 09:49:57.075673 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:57 crc kubenswrapper[4730]: I0930 09:49:57.075750 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:57 crc kubenswrapper[4730]: I0930 09:49:57.075836 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:57Z","lastTransitionTime":"2025-09-30T09:49:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:49:57 crc kubenswrapper[4730]: I0930 09:49:57.179335 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:57 crc kubenswrapper[4730]: I0930 09:49:57.179389 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:57 crc kubenswrapper[4730]: I0930 09:49:57.179404 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:57 crc kubenswrapper[4730]: I0930 09:49:57.179426 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:57 crc kubenswrapper[4730]: I0930 09:49:57.179439 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:57Z","lastTransitionTime":"2025-09-30T09:49:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:49:57 crc kubenswrapper[4730]: I0930 09:49:57.282002 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:57 crc kubenswrapper[4730]: I0930 09:49:57.282399 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:57 crc kubenswrapper[4730]: I0930 09:49:57.282504 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:57 crc kubenswrapper[4730]: I0930 09:49:57.282587 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:57 crc kubenswrapper[4730]: I0930 09:49:57.282743 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:57Z","lastTransitionTime":"2025-09-30T09:49:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:49:57 crc kubenswrapper[4730]: I0930 09:49:57.380859 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-dqqrb" Sep 30 09:49:57 crc kubenswrapper[4730]: E0930 09:49:57.381086 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-dqqrb" podUID="be86a67e-c663-4551-9ecf-a8c2a9801cd7" Sep 30 09:49:57 crc kubenswrapper[4730]: I0930 09:49:57.387326 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:57 crc kubenswrapper[4730]: I0930 09:49:57.387383 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:57 crc kubenswrapper[4730]: I0930 09:49:57.387402 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:57 crc kubenswrapper[4730]: I0930 09:49:57.387426 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:57 crc kubenswrapper[4730]: I0930 09:49:57.387449 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:57Z","lastTransitionTime":"2025-09-30T09:49:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:49:57 crc kubenswrapper[4730]: I0930 09:49:57.491010 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:57 crc kubenswrapper[4730]: I0930 09:49:57.491417 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:57 crc kubenswrapper[4730]: I0930 09:49:57.491651 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:57 crc kubenswrapper[4730]: I0930 09:49:57.491820 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:57 crc kubenswrapper[4730]: I0930 09:49:57.491978 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:57Z","lastTransitionTime":"2025-09-30T09:49:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:49:57 crc kubenswrapper[4730]: I0930 09:49:57.596196 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:57 crc kubenswrapper[4730]: I0930 09:49:57.596297 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:57 crc kubenswrapper[4730]: I0930 09:49:57.596320 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:57 crc kubenswrapper[4730]: I0930 09:49:57.596353 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:57 crc kubenswrapper[4730]: I0930 09:49:57.596378 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:57Z","lastTransitionTime":"2025-09-30T09:49:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:49:57 crc kubenswrapper[4730]: I0930 09:49:57.698676 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:57 crc kubenswrapper[4730]: I0930 09:49:57.698745 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:57 crc kubenswrapper[4730]: I0930 09:49:57.698761 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:57 crc kubenswrapper[4730]: I0930 09:49:57.698782 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:57 crc kubenswrapper[4730]: I0930 09:49:57.698798 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:57Z","lastTransitionTime":"2025-09-30T09:49:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:49:57 crc kubenswrapper[4730]: I0930 09:49:57.802267 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:57 crc kubenswrapper[4730]: I0930 09:49:57.802351 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:57 crc kubenswrapper[4730]: I0930 09:49:57.802376 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:57 crc kubenswrapper[4730]: I0930 09:49:57.802408 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:57 crc kubenswrapper[4730]: I0930 09:49:57.802432 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:57Z","lastTransitionTime":"2025-09-30T09:49:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:49:57 crc kubenswrapper[4730]: I0930 09:49:57.905821 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:57 crc kubenswrapper[4730]: I0930 09:49:57.905902 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:57 crc kubenswrapper[4730]: I0930 09:49:57.905920 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:57 crc kubenswrapper[4730]: I0930 09:49:57.905944 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:57 crc kubenswrapper[4730]: I0930 09:49:57.905960 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:57Z","lastTransitionTime":"2025-09-30T09:49:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:49:58 crc kubenswrapper[4730]: I0930 09:49:58.008442 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:58 crc kubenswrapper[4730]: I0930 09:49:58.008863 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:58 crc kubenswrapper[4730]: I0930 09:49:58.008980 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:58 crc kubenswrapper[4730]: I0930 09:49:58.009067 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:58 crc kubenswrapper[4730]: I0930 09:49:58.009190 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:58Z","lastTransitionTime":"2025-09-30T09:49:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:49:58 crc kubenswrapper[4730]: I0930 09:49:58.112550 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:58 crc kubenswrapper[4730]: I0930 09:49:58.112901 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:58 crc kubenswrapper[4730]: I0930 09:49:58.112985 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:58 crc kubenswrapper[4730]: I0930 09:49:58.113077 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:58 crc kubenswrapper[4730]: I0930 09:49:58.113142 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:58Z","lastTransitionTime":"2025-09-30T09:49:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:49:58 crc kubenswrapper[4730]: I0930 09:49:58.216360 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:58 crc kubenswrapper[4730]: I0930 09:49:58.216436 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:58 crc kubenswrapper[4730]: I0930 09:49:58.216456 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:58 crc kubenswrapper[4730]: I0930 09:49:58.216484 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:58 crc kubenswrapper[4730]: I0930 09:49:58.216502 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:58Z","lastTransitionTime":"2025-09-30T09:49:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:49:58 crc kubenswrapper[4730]: I0930 09:49:58.319603 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:58 crc kubenswrapper[4730]: I0930 09:49:58.319705 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:58 crc kubenswrapper[4730]: I0930 09:49:58.319716 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:58 crc kubenswrapper[4730]: I0930 09:49:58.319736 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:58 crc kubenswrapper[4730]: I0930 09:49:58.319747 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:58Z","lastTransitionTime":"2025-09-30T09:49:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:49:58 crc kubenswrapper[4730]: I0930 09:49:58.379920 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 09:49:58 crc kubenswrapper[4730]: I0930 09:49:58.379990 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 09:49:58 crc kubenswrapper[4730]: I0930 09:49:58.380114 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 09:49:58 crc kubenswrapper[4730]: E0930 09:49:58.380255 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 09:49:58 crc kubenswrapper[4730]: E0930 09:49:58.380446 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 09:49:58 crc kubenswrapper[4730]: E0930 09:49:58.380566 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 09:49:58 crc kubenswrapper[4730]: I0930 09:49:58.423755 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:58 crc kubenswrapper[4730]: I0930 09:49:58.423867 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:58 crc kubenswrapper[4730]: I0930 09:49:58.423880 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:58 crc kubenswrapper[4730]: I0930 09:49:58.423903 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:58 crc kubenswrapper[4730]: I0930 09:49:58.423916 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:58Z","lastTransitionTime":"2025-09-30T09:49:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:49:58 crc kubenswrapper[4730]: I0930 09:49:58.527751 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:49:58 crc kubenswrapper[4730]: I0930 09:49:58.527816 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:49:58 crc kubenswrapper[4730]: I0930 09:49:58.527835 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:49:58 crc kubenswrapper[4730]: I0930 09:49:58.527858 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:49:58 crc kubenswrapper[4730]: I0930 09:49:58.527877 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:58Z","lastTransitionTime":"2025-09-30T09:49:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Sep 30 09:49:58 crc kubenswrapper[4730]: I0930 09:49:58.630984 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 09:49:58 crc kubenswrapper[4730]: I0930 09:49:58.631038 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 09:49:58 crc kubenswrapper[4730]: I0930 09:49:58.631051 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 09:49:58 crc kubenswrapper[4730]: I0930 09:49:58.631077 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 09:49:58 crc kubenswrapper[4730]: I0930 09:49:58.631092 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:49:58Z","lastTransitionTime":"2025-09-30T09:49:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
[... 41 further copies of this five-entry status block, identical except for timestamps, logged roughly every 100 ms between 09:49:58.734210 and 09:50:02.868716, elided; only unique entries and the final status blocks at 09:50:02.955172 and 09:50:02.976887 are kept below ...]
Sep 30 09:49:59 crc kubenswrapper[4730]: I0930 09:49:59.262666 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/be86a67e-c663-4551-9ecf-a8c2a9801cd7-metrics-certs\") pod \"network-metrics-daemon-dqqrb\" (UID: \"be86a67e-c663-4551-9ecf-a8c2a9801cd7\") " pod="openshift-multus/network-metrics-daemon-dqqrb"
Sep 30 09:49:59 crc kubenswrapper[4730]: E0930 09:49:59.262949 4730 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Sep 30 09:49:59 crc kubenswrapper[4730]: E0930 09:49:59.263093 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/be86a67e-c663-4551-9ecf-a8c2a9801cd7-metrics-certs podName:be86a67e-c663-4551-9ecf-a8c2a9801cd7 nodeName:}" failed. No retries permitted until 2025-09-30 09:50:07.263053736 +0000 UTC m=+51.596313769 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/be86a67e-c663-4551-9ecf-a8c2a9801cd7-metrics-certs") pod "network-metrics-daemon-dqqrb" (UID: "be86a67e-c663-4551-9ecf-a8c2a9801cd7") : object "openshift-multus"/"metrics-daemon-secret" not registered
Sep 30 09:49:59 crc kubenswrapper[4730]: I0930 09:49:59.379964 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-dqqrb"
Sep 30 09:49:59 crc kubenswrapper[4730]: E0930 09:49:59.380750 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-dqqrb" podUID="be86a67e-c663-4551-9ecf-a8c2a9801cd7"
Sep 30 09:50:00 crc kubenswrapper[4730]: I0930 09:50:00.380269 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 30 09:50:00 crc kubenswrapper[4730]: E0930 09:50:00.380523 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 30 09:50:00 crc kubenswrapper[4730]: I0930 09:50:00.380802 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 30 09:50:00 crc kubenswrapper[4730]: I0930 09:50:00.380969 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 30 09:50:00 crc kubenswrapper[4730]: E0930 09:50:00.381251 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 30 09:50:00 crc kubenswrapper[4730]: E0930 09:50:00.381384 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 30 09:50:01 crc kubenswrapper[4730]: I0930 09:50:01.380417 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-dqqrb"
Sep 30 09:50:01 crc kubenswrapper[4730]: E0930 09:50:01.380681 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-dqqrb" podUID="be86a67e-c663-4551-9ecf-a8c2a9801cd7"
Sep 30 09:50:02 crc kubenswrapper[4730]: I0930 09:50:02.380492 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 30 09:50:02 crc kubenswrapper[4730]: I0930 09:50:02.380492 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 30 09:50:02 crc kubenswrapper[4730]: E0930 09:50:02.380719 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 30 09:50:02 crc kubenswrapper[4730]: E0930 09:50:02.380831 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 30 09:50:02 crc kubenswrapper[4730]: I0930 09:50:02.381252 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 30 09:50:02 crc kubenswrapper[4730]: E0930 09:50:02.381481 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 30 09:50:02 crc kubenswrapper[4730]: I0930 09:50:02.955172 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 09:50:02 crc kubenswrapper[4730]: I0930 09:50:02.955434 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 09:50:02 crc kubenswrapper[4730]: I0930 09:50:02.955589 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 09:50:02 crc kubenswrapper[4730]: I0930 09:50:02.955687 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 09:50:02 crc kubenswrapper[4730]: I0930 09:50:02.955750 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:02Z","lastTransitionTime":"2025-09-30T09:50:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:02 crc kubenswrapper[4730]: E0930 09:50:02.971553 4730 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:50:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:50:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:02Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:50:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:50:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:02Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"106859cf-ef10-430a-91cd-145c67df2de1\\\",\\\"systemUUID\\\":\\\"07b44d08-082f-49ea-b265-a8fb7a484875\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:02Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:02 crc kubenswrapper[4730]: I0930 09:50:02.976887 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:02 crc kubenswrapper[4730]: I0930 09:50:02.976969 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 09:50:02 crc kubenswrapper[4730]: I0930 09:50:02.976985 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:02 crc kubenswrapper[4730]: I0930 09:50:02.977009 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:02 crc kubenswrapper[4730]: I0930 09:50:02.977022 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:02Z","lastTransitionTime":"2025-09-30T09:50:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:02 crc kubenswrapper[4730]: E0930 09:50:02.997404 4730 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:50:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:50:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:02Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:50:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:50:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:02Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"106859cf-ef10-430a-91cd-145c67df2de1\\\",\\\"systemUUID\\\":\\\"07b44d08-082f-49ea-b265-a8fb7a484875\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:02Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:03 crc kubenswrapper[4730]: I0930 09:50:03.002543 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:03 crc kubenswrapper[4730]: I0930 09:50:03.002602 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 09:50:03 crc kubenswrapper[4730]: I0930 09:50:03.002638 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:03 crc kubenswrapper[4730]: I0930 09:50:03.002669 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:03 crc kubenswrapper[4730]: I0930 09:50:03.002687 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:03Z","lastTransitionTime":"2025-09-30T09:50:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:03 crc kubenswrapper[4730]: E0930 09:50:03.017251 4730 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:50:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:50:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:03Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:50:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:50:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:03Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"106859cf-ef10-430a-91cd-145c67df2de1\\\",\\\"systemUUID\\\":\\\"07b44d08-082f-49ea-b265-a8fb7a484875\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:03Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:03 crc kubenswrapper[4730]: I0930 09:50:03.022553 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:03 crc kubenswrapper[4730]: I0930 09:50:03.022591 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 09:50:03 crc kubenswrapper[4730]: I0930 09:50:03.022600 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:03 crc kubenswrapper[4730]: I0930 09:50:03.022630 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:03 crc kubenswrapper[4730]: I0930 09:50:03.022641 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:03Z","lastTransitionTime":"2025-09-30T09:50:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:03 crc kubenswrapper[4730]: E0930 09:50:03.040235 4730 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:50:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:50:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:03Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:50:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:50:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:03Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"106859cf-ef10-430a-91cd-145c67df2de1\\\",\\\"systemUUID\\\":\\\"07b44d08-082f-49ea-b265-a8fb7a484875\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:03Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:03 crc kubenswrapper[4730]: I0930 09:50:03.045543 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:03 crc kubenswrapper[4730]: I0930 09:50:03.045636 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 09:50:03 crc kubenswrapper[4730]: I0930 09:50:03.045656 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:03 crc kubenswrapper[4730]: I0930 09:50:03.045678 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:03 crc kubenswrapper[4730]: I0930 09:50:03.045695 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:03Z","lastTransitionTime":"2025-09-30T09:50:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:03 crc kubenswrapper[4730]: E0930 09:50:03.061865 4730 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:50:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:50:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:03Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:50:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:50:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:03Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"106859cf-ef10-430a-91cd-145c67df2de1\\\",\\\"systemUUID\\\":\\\"07b44d08-082f-49ea-b265-a8fb7a484875\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:03Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:03 crc kubenswrapper[4730]: E0930 09:50:03.062065 4730 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Sep 30 09:50:03 crc kubenswrapper[4730]: I0930 09:50:03.064154 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Sep 30 09:50:03 crc kubenswrapper[4730]: I0930 09:50:03.064206 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:03 crc kubenswrapper[4730]: I0930 09:50:03.064224 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:03 crc kubenswrapper[4730]: I0930 09:50:03.064278 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:03 crc kubenswrapper[4730]: I0930 09:50:03.064346 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:03Z","lastTransitionTime":"2025-09-30T09:50:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:03 crc kubenswrapper[4730]: I0930 09:50:03.167960 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:03 crc kubenswrapper[4730]: I0930 09:50:03.167998 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:03 crc kubenswrapper[4730]: I0930 09:50:03.168034 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:03 crc kubenswrapper[4730]: I0930 09:50:03.168055 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:03 crc kubenswrapper[4730]: I0930 09:50:03.168067 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:03Z","lastTransitionTime":"2025-09-30T09:50:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:03 crc kubenswrapper[4730]: I0930 09:50:03.270822 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:03 crc kubenswrapper[4730]: I0930 09:50:03.270898 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:03 crc kubenswrapper[4730]: I0930 09:50:03.270908 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:03 crc kubenswrapper[4730]: I0930 09:50:03.270928 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:03 crc kubenswrapper[4730]: I0930 09:50:03.270939 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:03Z","lastTransitionTime":"2025-09-30T09:50:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Sep 30 09:50:03 crc kubenswrapper[4730]: I0930 09:50:03.380139 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-dqqrb" Sep 30 09:50:03 crc kubenswrapper[4730]: E0930 09:50:03.380400 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-dqqrb" podUID="be86a67e-c663-4551-9ecf-a8c2a9801cd7"
Sep 30 09:50:04 crc kubenswrapper[4730]: I0930 09:50:04.380627 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 09:50:04 crc kubenswrapper[4730]: I0930 09:50:04.380663 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 09:50:04 crc kubenswrapper[4730]: I0930 09:50:04.380741 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 09:50:04 crc kubenswrapper[4730]: E0930 09:50:04.380799 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 09:50:04 crc kubenswrapper[4730]: E0930 09:50:04.380846 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 09:50:04 crc kubenswrapper[4730]: E0930 09:50:04.380929 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 09:50:04 crc kubenswrapper[4730]: I0930 09:50:04.415421 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:04 crc kubenswrapper[4730]: I0930 09:50:04.415461 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:04 crc kubenswrapper[4730]: I0930 09:50:04.415472 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:04 crc kubenswrapper[4730]: I0930 09:50:04.415491 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:04 crc kubenswrapper[4730]: I0930 09:50:04.415503 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:04Z","lastTransitionTime":"2025-09-30T09:50:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:04 crc kubenswrapper[4730]: I0930 09:50:04.518782 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:04 crc kubenswrapper[4730]: I0930 09:50:04.518879 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:04 crc kubenswrapper[4730]: I0930 09:50:04.518899 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:04 crc kubenswrapper[4730]: I0930 09:50:04.518927 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:04 crc kubenswrapper[4730]: I0930 09:50:04.518947 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:04Z","lastTransitionTime":"2025-09-30T09:50:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Sep 30 09:50:05 crc kubenswrapper[4730]: I0930 09:50:05.380454 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-dqqrb" Sep 30 09:50:05 crc kubenswrapper[4730]: E0930 09:50:05.380685 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
pod="openshift-multus/network-metrics-daemon-dqqrb" podUID="be86a67e-c663-4551-9ecf-a8c2a9801cd7" Sep 30 09:50:05 crc kubenswrapper[4730]: I0930 09:50:05.449652 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:05 crc kubenswrapper[4730]: I0930 09:50:05.449714 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:05 crc kubenswrapper[4730]: I0930 09:50:05.449728 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:05 crc kubenswrapper[4730]: I0930 09:50:05.449752 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:05 crc kubenswrapper[4730]: I0930 09:50:05.449768 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:05Z","lastTransitionTime":"2025-09-30T09:50:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:05 crc kubenswrapper[4730]: I0930 09:50:05.553475 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:05 crc kubenswrapper[4730]: I0930 09:50:05.553548 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:05 crc kubenswrapper[4730]: I0930 09:50:05.553571 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:05 crc kubenswrapper[4730]: I0930 09:50:05.553603 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:05 crc kubenswrapper[4730]: I0930 09:50:05.553688 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:05Z","lastTransitionTime":"2025-09-30T09:50:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Sep 30 09:50:06 crc kubenswrapper[4730]: I0930 09:50:06.380275 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 09:50:06 crc kubenswrapper[4730]: E0930 09:50:06.380446 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 09:50:06 crc kubenswrapper[4730]: I0930 09:50:06.381076 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 09:50:06 crc kubenswrapper[4730]: E0930 09:50:06.381247 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 09:50:06 crc kubenswrapper[4730]: I0930 09:50:06.381343 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 09:50:06 crc kubenswrapper[4730]: I0930 09:50:06.381413 4730 scope.go:117] "RemoveContainer" containerID="1ad89aefc193452c8800d8680d37a4b800155f7d26f323088ad7b7e636adaa88" Sep 30 09:50:06 crc kubenswrapper[4730]: E0930 09:50:06.381515 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 09:50:06 crc kubenswrapper[4730]: I0930 09:50:06.382290 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:06 crc kubenswrapper[4730]: I0930 09:50:06.382326 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:06 crc kubenswrapper[4730]: I0930 09:50:06.382337 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:06 crc kubenswrapper[4730]: I0930 09:50:06.382354 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:06 crc kubenswrapper[4730]: I0930 09:50:06.382366 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:06Z","lastTransitionTime":"2025-09-30T09:50:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:06 crc kubenswrapper[4730]: I0930 09:50:06.411940 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"823c4c28-801d-421e-b15f-02a17e300753\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://566f950ac85f82874c332e01386898784e3b3e37ee3c422f8b73f6e732cd4541\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f988662b16857e74e7c88eb9e69279e7b5807aa2831cdd6ed9a7cab0a6b2494\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d99f5927ded358669fa88d47fb6a8d8bad2808bfdd07920056227828e478eda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fbe6a18bf72a902960dfc551de099c3956696ca5ac78ff5afc30a93768be323\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd7abb6daba8cf38e3d8cc62ab526acb2fe714e07477eb5d1ecf77e145cf60dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d525f8baccabf7bc3b8067dbbc59d76851878ccc48a9fbe61c637b6fe57e01d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ad89aefc193452c8800d8680d37a4b800155f7d
26f323088ad7b7e636adaa88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ad89aefc193452c8800d8680d37a4b800155f7d26f323088ad7b7e636adaa88\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T09:49:50Z\\\",\\\"message\\\":\\\"55gtf\\\\nI0930 09:49:49.593780 6142 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI0930 09:49:49.593778 6142 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Switch Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:61897e97-c771-4738-8709-09636387cb00}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF0930 09:49:49.593434 6142 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:49Z is after \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:48Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-c5vmh_openshift-ovn-kubernetes(823c4c28-801d-421e-b15f-02a17e300753)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59538b49ec4037e4bd1ccb035a0263f32db2504037aca2ab484dc9323f80b5d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4fcd3f6fd645e486e741ae4ddea7a8669c849d729860c651131da25638a393f\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4fcd3f6fd645e486e741ae4ddea7a8669c849d729860c651131da25638a393f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-c5vmh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:06Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:06 crc kubenswrapper[4730]: I0930 09:50:06.434018 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-dqqrb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be86a67e-c663-4551-9ecf-a8c2a9801cd7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9sm8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9sm8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:51Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-dqqrb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:06Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:06 crc kubenswrapper[4730]: I0930 09:50:06.454303 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:06Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:06 crc kubenswrapper[4730]: I0930 09:50:06.476251 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:06Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:06 crc kubenswrapper[4730]: I0930 09:50:06.484900 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:06 crc kubenswrapper[4730]: I0930 09:50:06.484989 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:06 crc kubenswrapper[4730]: I0930 09:50:06.485009 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:06 crc kubenswrapper[4730]: I0930 09:50:06.485046 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:06 crc kubenswrapper[4730]: I0930 09:50:06.485072 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:06Z","lastTransitionTime":"2025-09-30T09:50:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:06 crc kubenswrapper[4730]: I0930 09:50:06.499990 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-t2frc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"98a6f8df-1ac8-4652-8074-90cb180311ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca62256374b9da6cb7b82db90ee6d121b072440c000bc7dfb04e763224cf666d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-txcfr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-t2frc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:06Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:06 crc kubenswrapper[4730]: I0930 09:50:06.517208 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-p4xvk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9871bed2-69f9-44f1-ab80-f8b4b9241e73\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19b882d14f4eaee6c6ad8cdd30c6e1e63eb837c7bc2dd31974c37ffbc3307186\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84aa64a838e41f5db6010538528a9d8d9221b5fe859e1afec9d9d3c71ef90eb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://84aa64a838e41f5db6010538528a9d8d9221b5fe859e1afec9d9d3c71ef90eb7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cn
ibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://618950d5ff8ec1ef34dc0f43e73a265aefbeb97f69a25226fb2007b4d28249a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://618950d5ff8ec1ef34dc0f43e73a265aefbeb97f69a25226fb2007b4d28249a7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://849eb24b3a7e62a7667c303069953f1391cdebf8f2888dc7d3967667c31c2a3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://849eb24b3a7e62a7667c303069953f1391cdebf8f2888dc7d3967667c31c2a3e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://685162f0c76d93c0aa78dc2c5c0ee794627f665afa60af9d9d0ae3ba8f5f7b59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":t
rue,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://685162f0c76d93c0aa78dc2c5c0ee794627f665afa60af9d9d0ae3ba8f5f7b59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c92fbb212b1ad8be488e639f7f88263b3d96ab0320da532d908b3c9df59ef2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c92fbb212b1ad8be488e639f7f88263b3d96ab0320da532d908b3c9df59ef2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://840f2a769ccdfba8933c685d07c325cf84d38bccf1d317c3b664f298aa52d878\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://840f2a769ccdfba8933c685d07c325cf84d38bccf1d317c3b664f298aa52d878\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-p4xvk\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:06Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:06 crc kubenswrapper[4730]: I0930 09:50:06.534279 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:06Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:06 crc kubenswrapper[4730]: I0930 09:50:06.551721 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://96ab57c0ead54e1fe3bbf9a7fe3fc8375d1e5ebd29840f01bb2022de2f3bfdc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://373aa4d8084c958728b4a3f16056068fb328a59e55a5f50785647d78fac0e424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:06Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:06 crc kubenswrapper[4730]: I0930 09:50:06.567507 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"95bd4436-8399-478d-9552-c9ba5ae8f327\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b7761b460113a85528a11a7475d5245ce3009a0dd413a8deec822c6c0eac07b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9d975\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://defac85c6bf34a3ea262b1ad293516b72b27c3f3e328f3f970694bb978bf90a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9d975\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d4zf9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:06Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:06 crc kubenswrapper[4730]: I0930 09:50:06.583592 4730 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-nw55k" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c0a99445-b60e-4c47-b4ae-00b9983d8a15\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b566e571569572866ddcfc76684ec3f1fdabc0f890aa5fd8588d136b890e88ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qdpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:40Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nw55k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:06Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:06 crc kubenswrapper[4730]: I0930 09:50:06.601839 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:06 crc kubenswrapper[4730]: I0930 09:50:06.601893 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:06 crc kubenswrapper[4730]: I0930 09:50:06.601906 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:06 crc kubenswrapper[4730]: I0930 09:50:06.601927 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:06 crc kubenswrapper[4730]: I0930 09:50:06.601940 4730 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:06Z","lastTransitionTime":"2025-09-30T09:50:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:06 crc kubenswrapper[4730]: I0930 09:50:06.610806 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wh45q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c534fd9f-3767-4be6-a84e-45260fe2042f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c1daa82f1b7e2909d26e662848abe7adaa29361f528ea2efac340bfd685bb155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n2mj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6b629ad4121cddae85f73b40e36013ebfd8d41e9f990dba444bdd2e51d8f9d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n2mj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"i
p\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:50Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wh45q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:06Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:06 crc kubenswrapper[4730]: I0930 09:50:06.627541 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"14260296-7de2-4c79-8afc-9472b7132a27\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac2d1a0a531df208cd612ed7da358209e05fb5a21f658ca2d1ab62720db094e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9d7ccc34d02e190d685a52bfa797834b585dfe919856b9af92419fa4e6e3c8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e42db724f492a056ad52bf50f43c9603664c4307a8f0c01d2d83d36f644c134c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f78
14a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5305f42ae8fc9005ec4726813a881f7b86ab167dd060238bbde4c9af5eb2c010\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://009056b5d735b1cd232be92cb1bfae18ea1e4e6f5b06185d424bba38ef57ade7\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T09:49:30Z\\\",\\\"message\\\":\\\"W0930 09:49:19.606005 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0930 09:49:19.606415 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759225759 cert, and key in /tmp/serving-cert-1864340106/serving-signer.crt, /tmp/serving-cert-1864340106/serving-signer.key\\\\nI0930 09:49:19.940795 1 observer_polling.go:159] Starting file observer\\\\nW0930 09:49:19.943973 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0930 09:49:19.944252 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 09:49:19.946386 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1864340106/tls.crt::/tmp/serving-cert-1864340106/tls.key\\\\\\\"\\\\nF0930 09:49:30.331930 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2da24b18038fc740ead0fe99d18f91c87014548941763cf0ef15284ed5eb228\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://674fc8de60d8bca93dc0d54d5e12664780d58b8b4c5eaf314e923c2f0644e11b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://674fc8de60d8bca93dc0d54d5e12664780d58b8b4c5eaf314e923c2f0644e11b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:16Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:06Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:06 crc kubenswrapper[4730]: I0930 09:50:06.642954 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"17c9fe87-51d8-4e94-b0aa-10f3112788c6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a687cd54db44118582a4e1b57fc64241860ae1cfdc202ffd5dccc517967e21f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://95fa75e3d43d0b4c4f3ddcb59fc3bd9fb3a75647e5fedd92463bdfaf6c3db722\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://758b57b21f203bcb20f4e3a3b20c6de8696ba56b4e0eff6a31faaf6879a3e7b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba42c83f0075e3d0f10f24307dd574c42a505351503a8794bbcee9e986ec80e3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:16Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:06Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:06 crc kubenswrapper[4730]: I0930 09:50:06.659715 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1917c40c0ff4b4b6a2389c465a4887bad0db0b5b86a8715e0c790b3e0acfb973\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:06Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:06 crc kubenswrapper[4730]: I0930 09:50:06.674054 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e32477c6ee071d9e994825da575711cc75f177c998dd5fd170be07551b40e049\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:06Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:06 crc kubenswrapper[4730]: I0930 09:50:06.689780 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s64nf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a7e6b85-ac68-4da9-b7eb-b5a936f639df\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a77da47c8d44b3aae7f4e27fcc4df190549c2d881e0969dbe3b8951103477ba3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6rknv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s64nf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:06Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:06 crc kubenswrapper[4730]: I0930 09:50:06.709007 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:06 crc kubenswrapper[4730]: I0930 09:50:06.709058 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:06 crc kubenswrapper[4730]: I0930 09:50:06.709069 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:06 crc kubenswrapper[4730]: I0930 09:50:06.709089 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:06 crc kubenswrapper[4730]: I0930 09:50:06.709102 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:06Z","lastTransitionTime":"2025-09-30T09:50:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:06 crc kubenswrapper[4730]: I0930 09:50:06.733483 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-c5vmh_823c4c28-801d-421e-b15f-02a17e300753/ovnkube-controller/1.log" Sep 30 09:50:06 crc kubenswrapper[4730]: I0930 09:50:06.737032 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" event={"ID":"823c4c28-801d-421e-b15f-02a17e300753","Type":"ContainerStarted","Data":"920e864b3b47f25ccbf4d23e865008517a888ed00df9d5e474a1d43ceac0faf1"} Sep 30 09:50:06 crc kubenswrapper[4730]: I0930 09:50:06.738764 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" Sep 30 09:50:06 crc kubenswrapper[4730]: I0930 09:50:06.754341 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:06Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:06 crc kubenswrapper[4730]: I0930 09:50:06.769020 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:06Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:06 crc kubenswrapper[4730]: I0930 09:50:06.781748 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-t2frc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"98a6f8df-1ac8-4652-8074-90cb180311ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca62256374b9da6cb7b82db90ee6d121b072440c000bc7dfb04e763224cf666d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\
"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-txcfr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-t2frc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:06Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:06 crc kubenswrapper[4730]: I0930 09:50:06.800439 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"823c4c28-801d-421e-b15f-02a17e300753\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://566f950ac85f82874c332e01386898784e3b3e37ee3c422f8b73f6e732cd4541\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f988662b16857e74e7c88eb9e69279e7b5807aa2831cdd6ed9a7cab0a6b2494\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d99f5927ded358669fa88d47fb6a8d8bad2808bfdd07920056227828e478eda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fbe6a18bf72a902960dfc551de099c3956696ca5ac78ff5afc30a93768be323\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd7abb6daba8cf38e3d8cc62ab526acb2fe714e07477eb5d1ecf77e145cf60dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d525f8baccabf7bc3b8067dbbc59d76851878ccc48a9fbe61c637b6fe57e01d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://920e864b3b47f25ccbf4d23e865008517a888ed0
0df9d5e474a1d43ceac0faf1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ad89aefc193452c8800d8680d37a4b800155f7d26f323088ad7b7e636adaa88\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T09:49:50Z\\\",\\\"message\\\":\\\"55gtf\\\\nI0930 09:49:49.593780 6142 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI0930 09:49:49.593778 6142 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Switch Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:61897e97-c771-4738-8709-09636387cb00}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF0930 09:49:49.593434 6142 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:49Z is after 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:48Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59538b49ec4037e4bd1ccb035a0263f32db2504037aca2ab484dc9323f80b5d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"conta
inerID\\\":\\\"cri-o://e4fcd3f6fd645e486e741ae4ddea7a8669c849d729860c651131da25638a393f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4fcd3f6fd645e486e741ae4ddea7a8669c849d729860c651131da25638a393f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-c5vmh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:06Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:06 crc kubenswrapper[4730]: I0930 09:50:06.811423 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:06 crc kubenswrapper[4730]: I0930 09:50:06.811455 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:06 crc kubenswrapper[4730]: I0930 09:50:06.811464 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:06 crc kubenswrapper[4730]: I0930 09:50:06.811480 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:06 crc kubenswrapper[4730]: I0930 09:50:06.811490 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:06Z","lastTransitionTime":"2025-09-30T09:50:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:06 crc kubenswrapper[4730]: I0930 09:50:06.811963 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-dqqrb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be86a67e-c663-4551-9ecf-a8c2a9801cd7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9sm8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9sm8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:51Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-dqqrb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:06Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:06 crc kubenswrapper[4730]: I0930 09:50:06.826112 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:06Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:06 crc kubenswrapper[4730]: I0930 09:50:06.840598 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://96ab57c0ead54e1fe3bbf9a7fe3fc8375d1e5ebd29840f01bb2022de2f3bfdc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://373aa4d8084c958728b4a3f16056068fb328a59e55a5f50785647d78fac0e424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:06Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:06 crc kubenswrapper[4730]: I0930 09:50:06.858917 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-p4xvk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9871bed2-69f9-44f1-ab80-f8b4b9241e73\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19b882d14f4eaee6c6ad8cdd30c6e1e63eb837c7bc2dd31974c37ffbc3307186\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84aa64a838e41f5db6010538528a9d8d9221b5fe859e1afec9d9d3c71ef90eb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://84aa64a838e41f5db6010538528a9d8d9221b5fe859e1afec9d9d3c71ef90eb7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://618950d5ff8ec1ef34dc0f43e73a265aefbeb97f69a25226fb2007b4d28249a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://618950d5ff8ec1ef34dc0f43e73a265aefbeb97f69a25226fb2007b4d28249a7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://849eb24b3a7e62a7667c303069953f1391cdebf8f2888dc7d3967667c31c2a3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://849eb24b3a7e62a7667c303069953f1391cdebf8f2888dc7d3967667c31c2a3e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://685162f0c76d93c0aa78dc2c5c0ee794627f665afa60af9d9d0ae3ba8f5f7b59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://685162f0c76d93c0aa78dc2c5c0ee794627f665afa60af9d9d0ae3ba8f5f7b59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c92fbb212b1ad8be488e639f7f88263b3d96ab0320da532d908b3c9df59ef2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c92fbb212b1ad8be488e639f7f88263b3d96ab0320da532d908b3c9df59ef2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://840f2a769ccdfba8933c685d07c325cf84d38bccf1d317c3b664f298aa52d878\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://840f2a769ccdfba8933c685d07c325cf84d38bccf1d317c3b664f298aa52d878\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-p4xvk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:06Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:06 crc kubenswrapper[4730]: I0930 09:50:06.874722 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"17c9fe87-51d8-4e94-b0aa-10f3112788c6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a687cd54db44118582a4e1b57fc64241860ae1cfdc202ffd5dccc517967e21f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://95fa75e3d43d0b4c4f3ddcb59fc3bd9fb3a75647e5fedd92463bdfaf6c3db722\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://758b57b21f203bcb20f4e3a3b20c6de8696ba56b4e0eff6a31faaf6879a3e7b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba42c83f0075e3d0f10f24307dd574c42a505351503a8794bbcee9e986ec80e3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:16Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:06Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:06 crc kubenswrapper[4730]: I0930 09:50:06.896517 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1917c40c0ff4b4b6a2389c465a4887bad0db0b5b86a8715e0c790b3e0acfb973\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:06Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:06 crc kubenswrapper[4730]: I0930 09:50:06.910052 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e32477c6ee071d9e994825da575711cc75f177c998dd5fd170be07551b40e049\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:06Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:06 crc kubenswrapper[4730]: I0930 09:50:06.913955 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:06 crc kubenswrapper[4730]: I0930 09:50:06.913986 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:06 crc kubenswrapper[4730]: I0930 09:50:06.913997 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:06 crc kubenswrapper[4730]: I0930 09:50:06.914015 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:06 crc kubenswrapper[4730]: I0930 09:50:06.914027 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:06Z","lastTransitionTime":"2025-09-30T09:50:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in 
/etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:06 crc kubenswrapper[4730]: I0930 09:50:06.927165 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"95bd4436-8399-478d-9552-c9ba5ae8f327\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b7761b460113a85528a11a7475d5245ce3009a0dd413a8deec822c6c0eac07b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9d975\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://defac85c6bf34a3ea262b1ad293516b72b27c3f3e328f3f970694bb978bf90a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9d975\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d4zf9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:06Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:06 crc kubenswrapper[4730]: I0930 09:50:06.945293 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-nw55k" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c0a99445-b60e-4c47-b4ae-00b9983d8a15\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b566e571569572866ddcfc76684ec3f1fdabc0f890aa5fd8588d136b890e88ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qdpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:40Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nw55k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:06Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:06 crc kubenswrapper[4730]: I0930 09:50:06.961825 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wh45q" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c534fd9f-3767-4be6-a84e-45260fe2042f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c1daa82f1b7e2909d26e662848abe7adaa29361f528ea2efac340bfd685bb155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n2mj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6b629ad4121cddae85f73b40e36013ebfd8d41e9f990dba444bdd2e51d8f9d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n2mj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:50Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wh45q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:06Z is after 2025-08-24T17:21:41Z" Sep 30 
09:50:06 crc kubenswrapper[4730]: I0930 09:50:06.979517 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"14260296-7de2-4c79-8afc-9472b7132a27\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac2d1a0a531df208cd612ed7da358209e05fb5a21f658ca2d1ab62720db094e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9d7ccc34d02e190d685a52bfa797834b585dfe919856b9af92419fa4e6e3c8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e42db724f492a056ad52bf50f43c9603664c4307a8f0c01d2d83d36f644c134c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\
\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5305f42ae8fc9005ec4726813a881f7b86ab167dd060238bbde4c9af5eb2c010\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://009056b5d735b1cd232be92cb1bfae18ea1e4e6f5b06185d424bba38ef57ade7\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T09:49:30Z\\\",\\\"message\\\":\\\"W0930 09:49:19.606005 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0930 09:49:19.606415 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759225759 cert, and key in /tmp/serving-cert-1864340106/serving-signer.crt, /tmp/serving-cert-1864340106/serving-signer.key\\\\nI0930 09:49:19.940795 1 observer_polling.go:159] Starting file observer\\\\nW0930 09:49:19.943973 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0930 09:49:19.944252 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 09:49:19.946386 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1864340106/tls.crt::/tmp/serving-cert-1864340106/tls.key\\\\\\\"\\\\nF0930 09:49:30.331930 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2da24b18038fc740ead0fe99d18f91c87014548941763cf0ef15284ed5eb228\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://674fc8de60d8bca93dc0d54d5e12664780d58b8b4c5eaf314e923c2f0644e11b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://674fc8de60d8bca93dc0d54d5e12664780d58b8b4c5eaf314e923c2f0644e11b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:16Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:06Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:06 crc kubenswrapper[4730]: I0930 09:50:06.992326 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s64nf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a7e6b85-ac68-4da9-b7eb-b5a936f639df\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a77da47c8d44b3aae7f4e27fcc4df190549c2d881e0969dbe3b8951103477ba3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6rknv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s64nf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:06Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:07 crc kubenswrapper[4730]: I0930 09:50:07.016987 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:07 crc kubenswrapper[4730]: I0930 09:50:07.017050 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:07 crc kubenswrapper[4730]: I0930 09:50:07.017064 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:07 crc kubenswrapper[4730]: I0930 09:50:07.017088 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:07 crc kubenswrapper[4730]: I0930 09:50:07.017102 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:07Z","lastTransitionTime":"2025-09-30T09:50:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:07 crc kubenswrapper[4730]: I0930 09:50:07.120169 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:07 crc kubenswrapper[4730]: I0930 09:50:07.120226 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:07 crc kubenswrapper[4730]: I0930 09:50:07.120253 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:07 crc kubenswrapper[4730]: I0930 09:50:07.120274 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:07 crc kubenswrapper[4730]: I0930 09:50:07.120289 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:07Z","lastTransitionTime":"2025-09-30T09:50:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:07 crc kubenswrapper[4730]: I0930 09:50:07.223972 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:07 crc kubenswrapper[4730]: I0930 09:50:07.224035 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:07 crc kubenswrapper[4730]: I0930 09:50:07.224048 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:07 crc kubenswrapper[4730]: I0930 09:50:07.224070 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:07 crc kubenswrapper[4730]: I0930 09:50:07.224084 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:07Z","lastTransitionTime":"2025-09-30T09:50:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:07 crc kubenswrapper[4730]: I0930 09:50:07.326999 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:07 crc kubenswrapper[4730]: I0930 09:50:07.327039 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:07 crc kubenswrapper[4730]: I0930 09:50:07.327047 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:07 crc kubenswrapper[4730]: I0930 09:50:07.327063 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:07 crc kubenswrapper[4730]: I0930 09:50:07.327076 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:07Z","lastTransitionTime":"2025-09-30T09:50:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:07 crc kubenswrapper[4730]: I0930 09:50:07.361805 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/be86a67e-c663-4551-9ecf-a8c2a9801cd7-metrics-certs\") pod \"network-metrics-daemon-dqqrb\" (UID: \"be86a67e-c663-4551-9ecf-a8c2a9801cd7\") " pod="openshift-multus/network-metrics-daemon-dqqrb" Sep 30 09:50:07 crc kubenswrapper[4730]: E0930 09:50:07.361992 4730 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Sep 30 09:50:07 crc kubenswrapper[4730]: E0930 09:50:07.362066 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/be86a67e-c663-4551-9ecf-a8c2a9801cd7-metrics-certs podName:be86a67e-c663-4551-9ecf-a8c2a9801cd7 nodeName:}" failed. No retries permitted until 2025-09-30 09:50:23.36203988 +0000 UTC m=+67.695299873 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/be86a67e-c663-4551-9ecf-a8c2a9801cd7-metrics-certs") pod "network-metrics-daemon-dqqrb" (UID: "be86a67e-c663-4551-9ecf-a8c2a9801cd7") : object "openshift-multus"/"metrics-daemon-secret" not registered Sep 30 09:50:07 crc kubenswrapper[4730]: I0930 09:50:07.380476 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-dqqrb" Sep 30 09:50:07 crc kubenswrapper[4730]: E0930 09:50:07.380689 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-dqqrb" podUID="be86a67e-c663-4551-9ecf-a8c2a9801cd7" Sep 30 09:50:07 crc kubenswrapper[4730]: I0930 09:50:07.430724 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:07 crc kubenswrapper[4730]: I0930 09:50:07.430781 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:07 crc kubenswrapper[4730]: I0930 09:50:07.430794 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:07 crc kubenswrapper[4730]: I0930 09:50:07.430816 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:07 crc kubenswrapper[4730]: I0930 09:50:07.430833 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:07Z","lastTransitionTime":"2025-09-30T09:50:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:07 crc kubenswrapper[4730]: I0930 09:50:07.508114 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 30 09:50:07 crc kubenswrapper[4730]: I0930 09:50:07.521138 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Sep 30 09:50:07 crc kubenswrapper[4730]: I0930 09:50:07.523889 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s64nf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a7e6b85-ac68-4da9-b7eb-b5a936f639df\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a77da47c8d44b3aae7f4e27fcc4df190549c2d881e0969dbe3b8951103477ba3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6rknv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s64nf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:07Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:07 crc kubenswrapper[4730]: I0930 09:50:07.533699 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:07 crc kubenswrapper[4730]: I0930 09:50:07.533825 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:07 crc kubenswrapper[4730]: I0930 09:50:07.533908 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:07 crc kubenswrapper[4730]: I0930 09:50:07.533983 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:07 crc kubenswrapper[4730]: I0930 09:50:07.534059 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:07Z","lastTransitionTime":"2025-09-30T09:50:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:07 crc kubenswrapper[4730]: I0930 09:50:07.540589 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:07Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:07 crc kubenswrapper[4730]: I0930 09:50:07.556395 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:07Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:07 crc kubenswrapper[4730]: I0930 09:50:07.574857 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-t2frc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"98a6f8df-1ac8-4652-8074-90cb180311ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca62256374b9da6cb7b82db90ee6d121b072440c000bc7dfb04e763224cf666d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"syste
m-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-txcfr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-t2frc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:07Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:07 crc kubenswrapper[4730]: I0930 09:50:07.594193 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"823c4c28-801d-421e-b15f-02a17e300753\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://566f950ac85f82874c332e01386898784e3b3e37ee3c422f8b73f6e732cd4541\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f988662b16857e74e7c88eb9e69279e7b5807aa2831cdd6ed9a7cab0a6b2494\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d99f5927ded358669fa88d47fb6a8d8bad2808bfdd07920056227828e478eda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fbe6a18bf72a902960dfc551de099c3956696ca5ac78ff5afc30a93768be323\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd7abb6daba8cf38e3d8cc62ab526acb2fe714e07477eb5d1ecf77e145cf60dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d525f8baccabf7bc3b8067dbbc59d76851878ccc48a9fbe61c637b6fe57e01d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://920e864b3b47f25ccbf4d23e865008517a888ed0
0df9d5e474a1d43ceac0faf1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ad89aefc193452c8800d8680d37a4b800155f7d26f323088ad7b7e636adaa88\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T09:49:50Z\\\",\\\"message\\\":\\\"55gtf\\\\nI0930 09:49:49.593780 6142 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI0930 09:49:49.593778 6142 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Switch Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:61897e97-c771-4738-8709-09636387cb00}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF0930 09:49:49.593434 6142 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:49Z is after 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:48Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59538b49ec4037e4bd1ccb035a0263f32db2504037aca2ab484dc9323f80b5d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"conta
inerID\\\":\\\"cri-o://e4fcd3f6fd645e486e741ae4ddea7a8669c849d729860c651131da25638a393f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4fcd3f6fd645e486e741ae4ddea7a8669c849d729860c651131da25638a393f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-c5vmh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:07Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:07 crc kubenswrapper[4730]: I0930 09:50:07.609345 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-dqqrb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be86a67e-c663-4551-9ecf-a8c2a9801cd7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9sm8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9sm8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:51Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-dqqrb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:07Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:07 crc kubenswrapper[4730]: I0930 09:50:07.627742 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:07Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:07 crc kubenswrapper[4730]: I0930 09:50:07.637644 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:07 crc kubenswrapper[4730]: I0930 09:50:07.637682 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:07 crc kubenswrapper[4730]: I0930 09:50:07.637693 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:07 crc kubenswrapper[4730]: I0930 09:50:07.637710 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:07 crc kubenswrapper[4730]: I0930 09:50:07.637721 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:07Z","lastTransitionTime":"2025-09-30T09:50:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:07 crc kubenswrapper[4730]: I0930 09:50:07.649912 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://96ab57c0ead54e1fe3bbf9a7fe3fc8375d1e5ebd29840f01bb2022de2f3bfdc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://373aa4d8084c958728b4a3f16056068fb328a59e55a5f50785647d78fac0e424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:07Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:07 crc kubenswrapper[4730]: I0930 09:50:07.672094 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-p4xvk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9871bed2-69f9-44f1-ab80-f8b4b9241e73\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19b882d14f4eaee6c6ad8cdd30c6e1e63eb837c7bc2dd31974c37ffbc3307186\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84aa64a838e41f5db6010538528a9d8d9221b5fe859e1afec9d9d3c71ef90eb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://84aa64a838e41f5db6010538528a9d8d9221b5fe859e1afec9d9d3c71ef90eb7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://618950d5ff8ec1ef34dc0f43e73a265aefbeb97f69a25226fb2007b4d28249a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://618950d5ff8ec1ef34dc0f43e73a265aefbeb97f69a25226fb2007b4d28249a7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://849eb24b3a7e62a7667c303069953f1391cdebf8f2888dc7d3967667c31c2a3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://849eb24b3a7e62a7667c303069953f1391cdebf8f2888dc7d3967667c31c2a3e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://685162f0c76d93c0aa78dc2c5c0ee794627f665afa60af9d9d0ae3ba8f5f7b59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://685162f0c76d93c0aa78dc2c5c0ee794627f665afa60af9d9d0ae3ba8f5f7b59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c92fbb212b1ad8be488e639f7f88263b3d96ab0320da532d908b3c9df59ef2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c92fbb212b1ad8be488e639f7f88263b3d96ab0320da532d908b3c9df59ef2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://840f2a769ccdfba8933c685d07c325cf84d38bccf1d317c3b664f298aa52d878\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://840f2a769ccdfba8933c685d07c325cf84d38bccf1d317c3b664f298aa52d878\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-p4xvk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:07Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:07 crc kubenswrapper[4730]: I0930 09:50:07.686961 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wh45q" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c534fd9f-3767-4be6-a84e-45260fe2042f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c1daa82f1b7e2909d26e662848abe7adaa29361f528ea2efac340bfd685bb155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n2mj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6b629ad4121cddae85f73b40e36013ebfd8d41e9f990dba444bdd2e51d8f9d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n2mj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:50Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wh45q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:07Z is after 2025-08-24T17:21:41Z" Sep 30 
09:50:07 crc kubenswrapper[4730]: I0930 09:50:07.701142 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"14260296-7de2-4c79-8afc-9472b7132a27\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac2d1a0a531df208cd612ed7da358209e05fb5a21f658ca2d1ab62720db094e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9d7ccc34d02e190d685a52bfa797834b585dfe919856b9af92419fa4e6e3c8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e42db724f492a056ad52bf50f43c9603664c4307a8f0c01d2d83d36f644c134c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\
\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5305f42ae8fc9005ec4726813a881f7b86ab167dd060238bbde4c9af5eb2c010\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://009056b5d735b1cd232be92cb1bfae18ea1e4e6f5b06185d424bba38ef57ade7\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T09:49:30Z\\\",\\\"message\\\":\\\"W0930 09:49:19.606005 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0930 09:49:19.606415 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759225759 cert, and key in /tmp/serving-cert-1864340106/serving-signer.crt, /tmp/serving-cert-1864340106/serving-signer.key\\\\nI0930 09:49:19.940795 1 observer_polling.go:159] Starting file observer\\\\nW0930 09:49:19.943973 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0930 09:49:19.944252 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 09:49:19.946386 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1864340106/tls.crt::/tmp/serving-cert-1864340106/tls.key\\\\\\\"\\\\nF0930 09:49:30.331930 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2da24b18038fc740ead0fe99d18f91c87014548941763cf0ef15284ed5eb228\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://674fc8de60d8bca93dc0d54d5e12664780d58b8b4c5eaf314e923c2f0644e11b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://674fc8de60d8bca93dc0d54d5e12664780d58b8b4c5eaf314e923c2f0644e11b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:16Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:07Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:07 crc kubenswrapper[4730]: I0930 09:50:07.717116 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"17c9fe87-51d8-4e94-b0aa-10f3112788c6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a687cd54db44118582a4e1b57fc64241860ae1cfdc202ffd5dccc517967e21f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://95fa75e3d43d0b4c4f3ddcb59fc3bd9fb3a75647e5fedd92463bdfaf6c3db722\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://758b57b21f203bcb20f4e3a3b20c6de8696ba56b4e0eff6a31faaf6879a3e7b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba42c83f0075e3d0f10f24307dd574c42a505351503a8794bbcee9e986ec80e3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:16Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:07Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:07 crc kubenswrapper[4730]: I0930 09:50:07.735003 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1917c40c0ff4b4b6a2389c465a4887bad0db0b5b86a8715e0c790b3e0acfb973\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:07Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:07 crc kubenswrapper[4730]: I0930 09:50:07.740385 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:07 crc kubenswrapper[4730]: I0930 09:50:07.740424 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:07 crc kubenswrapper[4730]: I0930 09:50:07.740435 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:07 crc kubenswrapper[4730]: I0930 09:50:07.740452 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:07 crc kubenswrapper[4730]: I0930 09:50:07.740464 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:07Z","lastTransitionTime":"2025-09-30T09:50:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:07 crc kubenswrapper[4730]: I0930 09:50:07.742444 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-c5vmh_823c4c28-801d-421e-b15f-02a17e300753/ovnkube-controller/2.log" Sep 30 09:50:07 crc kubenswrapper[4730]: I0930 09:50:07.743823 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-c5vmh_823c4c28-801d-421e-b15f-02a17e300753/ovnkube-controller/1.log" Sep 30 09:50:07 crc kubenswrapper[4730]: I0930 09:50:07.746559 4730 generic.go:334] "Generic (PLEG): container finished" podID="823c4c28-801d-421e-b15f-02a17e300753" containerID="920e864b3b47f25ccbf4d23e865008517a888ed00df9d5e474a1d43ceac0faf1" exitCode=1 Sep 30 09:50:07 crc kubenswrapper[4730]: I0930 09:50:07.746660 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" event={"ID":"823c4c28-801d-421e-b15f-02a17e300753","Type":"ContainerDied","Data":"920e864b3b47f25ccbf4d23e865008517a888ed00df9d5e474a1d43ceac0faf1"} Sep 30 09:50:07 crc kubenswrapper[4730]: I0930 09:50:07.746735 4730 scope.go:117] "RemoveContainer" containerID="1ad89aefc193452c8800d8680d37a4b800155f7d26f323088ad7b7e636adaa88" Sep 30 09:50:07 crc kubenswrapper[4730]: I0930 09:50:07.747545 4730 scope.go:117] "RemoveContainer" containerID="920e864b3b47f25ccbf4d23e865008517a888ed00df9d5e474a1d43ceac0faf1" Sep 30 09:50:07 crc kubenswrapper[4730]: E0930 09:50:07.747707 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-c5vmh_openshift-ovn-kubernetes(823c4c28-801d-421e-b15f-02a17e300753)\"" pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" podUID="823c4c28-801d-421e-b15f-02a17e300753" Sep 30 09:50:07 crc kubenswrapper[4730]: I0930 09:50:07.750431 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e32477c6ee071d9e994825da575711cc75f177c998dd5fd170be07551b40e049\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:07Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:07 crc kubenswrapper[4730]: I0930 09:50:07.764706 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"95bd4436-8399-478d-9552-c9ba5ae8f327\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b7761b460113a85528a11a7475d5245ce3009a0dd413a8deec822c6c0eac07b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9d975\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://defac85c6bf34a3ea262b1ad293516b72b27c3f3e328f3f970694bb978bf90a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9d975\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d4zf9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:07Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:07 crc kubenswrapper[4730]: I0930 09:50:07.778054 4730 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-nw55k" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c0a99445-b60e-4c47-b4ae-00b9983d8a15\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b566e571569572866ddcfc76684ec3f1fdabc0f890aa5fd8588d136b890e88ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qdpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:40Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nw55k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:07Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:07 crc kubenswrapper[4730]: I0930 09:50:07.791478 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s64nf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a7e6b85-ac68-4da9-b7eb-b5a936f639df\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a77da47c8d44b3aae7f4e27fcc4df190549c2d881e0969dbe3b8951103477ba3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6rknv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s64nf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:07Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:07 crc kubenswrapper[4730]: I0930 09:50:07.806055 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:07Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:07 crc kubenswrapper[4730]: I0930 09:50:07.819721 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-t2frc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"98a6f8df-1ac8-4652-8074-90cb180311ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca62256374b9da6cb7b82db90ee6d121b072440c000bc7dfb04e763224cf666d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"syste
m-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-txcfr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-t2frc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:07Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:07 crc kubenswrapper[4730]: I0930 09:50:07.841496 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"823c4c28-801d-421e-b15f-02a17e300753\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://566f950ac85f82874c332e01386898784e3b3e37ee3c422f8b73f6e732cd4541\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f988662b16857e74e7c88eb9e69279e7b5807aa2831cdd6ed9a7cab0a6b2494\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d99f5927ded358669fa88d47fb6a8d8bad2808bfdd07920056227828e478eda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fbe6a18bf72a902960dfc551de099c3956696ca5ac78ff5afc30a93768be323\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd7abb6daba8cf38e3d8cc62ab526acb2fe714e07477eb5d1ecf77e145cf60dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d525f8baccabf7bc3b8067dbbc59d76851878ccc48a9fbe61c637b6fe57e01d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://920e864b3b47f25ccbf4d23e865008517a888ed0
0df9d5e474a1d43ceac0faf1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ad89aefc193452c8800d8680d37a4b800155f7d26f323088ad7b7e636adaa88\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T09:49:50Z\\\",\\\"message\\\":\\\"55gtf\\\\nI0930 09:49:49.593780 6142 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI0930 09:49:49.593778 6142 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Switch Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:61897e97-c771-4738-8709-09636387cb00}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF0930 09:49:49.593434 6142 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:49:49Z is after \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:48Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://920e864b3b47f25ccbf4d23e865008517a888ed00df9d5e474a1d43ceac0faf1\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T09:50:07Z\\\",\\\"message\\\":\\\"/multus-additional-cni-plugins-p4xvk\\\\nF0930 09:50:07.486769 6349 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:07Z is after 2025-08-24T17:21:41Z]\\\\nI0930 09:50:07.486778 6349 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-console/networking-console-plugin-85b44fc459-gdk6g\\\\nI0930 09:50:07.486723 6349 model_client.go:382] Update operations generated 
as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-machine-config-operator/machine-config-operator]} name:Service_openshift-machine-config-operator/machine-config-operator_TCP_cluster o\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T09:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59538b49ec4037e4bd1ccb035a0263f32db2504037aca2ab484dc9323f80b5d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\
\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4fcd3f6fd645e486e741ae4ddea7a8669c849d729860c651131da25638a393f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4fcd3f6fd645e486e741ae4ddea7a8669c849d729860c651131da25638a393f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-c5vmh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:07Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:07 crc kubenswrapper[4730]: I0930 09:50:07.843317 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:07 crc kubenswrapper[4730]: I0930 09:50:07.843375 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:07 crc kubenswrapper[4730]: I0930 09:50:07.843389 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:07 crc kubenswrapper[4730]: I0930 09:50:07.843411 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:07 crc kubenswrapper[4730]: I0930 09:50:07.843423 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:07Z","lastTransitionTime":"2025-09-30T09:50:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:07 crc kubenswrapper[4730]: I0930 09:50:07.854875 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-dqqrb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be86a67e-c663-4551-9ecf-a8c2a9801cd7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9sm8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9sm8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:51Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-dqqrb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:07Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:07 crc kubenswrapper[4730]: I0930 09:50:07.869919 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:07Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:07 crc kubenswrapper[4730]: I0930 09:50:07.884415 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:07Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:07 crc kubenswrapper[4730]: I0930 09:50:07.896552 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://96ab57c0ead54e1fe3bbf9a7fe3fc8375d1e5ebd29840f01bb2022de2f3bfdc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://373aa4d8084c958728b4a3f16056068fb328a59e55a5f50785647d78fac0e424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imag
eID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:07Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:07 crc kubenswrapper[4730]: I0930 09:50:07.911920 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-p4xvk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9871bed2-69f9-44f1-ab80-f8b4b9241e73\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19b882d14f4eaee6c6ad8cdd30c6e1e63eb837c7bc2dd31974c37ffbc3307186\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84aa64a838e41f5db6010538528a9d8d9221b5fe859e1afec9d9d3c71ef90eb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\
":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://84aa64a838e41f5db6010538528a9d8d9221b5fe859e1afec9d9d3c71ef90eb7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://618950d5ff8ec1ef34dc0f43e73a265aefbeb97f69a25226fb2007b4d28249a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://618950d5ff8ec1ef34dc0f43e73a265aefbeb97f69a25226fb2007b4d28249a7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://849eb24b3a7e62a7667c303069953f1391cdebf8f2888dc7d3967667c31c2a3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://849eb24b3a7e62a7667c303069953f1391cdebf8f2888dc7d3967667c31c2a3e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\
\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://685162f0c76d93c0aa78dc2c5c0ee794627f665afa60af9d9d0ae3ba8f5f7b59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://685162f0c76d93c0aa78dc2c5c0ee794627f665afa60af9d9d0ae3ba8f5f7b59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c92fbb212b1ad8be488e639f7f88263b3d96ab0320da532d908b3c9df59ef2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c92fbb212b1ad8be488e639f7f88263b3d96ab0320da532d908b3c9df59ef2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://840f2a769ccdfba8933c685d07c325cf84d38bccf1d317c3b664f298aa52d878\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://840f2a769ccdfba8933c685d07c325cf84d38bccf1d317c3b664f298aa52d878\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:43
Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-p4xvk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:07Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:07 crc kubenswrapper[4730]: I0930 09:50:07.925932 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d4482712-8a6c-4e35-8e85-0777588ab827\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d84dba8205ee9f4469b0277711b38ff82a1068b3f1cd951ea4a4eefab51ba88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://58b48296b6764c18fc1b5163c1ef503b124b97f11f287fbbbc5b1144ecd06bbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/
etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://461de41b45c19fbaf8e606cd30458759972ca804bac7e4cd5495efd8f30280fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0abf086d2b6dabfe2b74ed6b31b94bbd61222698aeb420775118dcca92243bbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0abf086d2b6dabfe2b74ed6b31b94bbd61222698aeb420775118dcca92243bbd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:17Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:16Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:07Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:07 crc kubenswrapper[4730]: I0930 09:50:07.940565 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1917c40c0ff4b4b6a2389c465a4887bad0db0b5b86a8715e0c790b3e0acfb973\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:07Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:07 crc kubenswrapper[4730]: I0930 09:50:07.946126 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:07 crc kubenswrapper[4730]: I0930 09:50:07.946163 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:07 crc kubenswrapper[4730]: I0930 09:50:07.946173 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:07 crc kubenswrapper[4730]: I0930 09:50:07.946192 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:07 crc kubenswrapper[4730]: I0930 09:50:07.946203 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:07Z","lastTransitionTime":"2025-09-30T09:50:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:07 crc kubenswrapper[4730]: I0930 09:50:07.955266 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e32477c6ee071d9e994825da575711cc75f177c998dd5fd170be07551b40e049\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:07Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:07 crc kubenswrapper[4730]: I0930 09:50:07.968458 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"95bd4436-8399-478d-9552-c9ba5ae8f327\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b7761b460113a85528a11a7475d5245ce3009a0dd413a8deec822c6c0eac07b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9d975\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://defac85c6bf34a3ea262b1ad293516b72b27c3f3e328f3f970694bb978bf90a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9d975\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d4zf9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:07Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:07 crc kubenswrapper[4730]: I0930 09:50:07.991948 4730 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-nw55k" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c0a99445-b60e-4c47-b4ae-00b9983d8a15\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b566e571569572866ddcfc76684ec3f1fdabc0f890aa5fd8588d136b890e88ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qdpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:40Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nw55k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:07Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:08 crc kubenswrapper[4730]: I0930 09:50:08.007792 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wh45q" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c534fd9f-3767-4be6-a84e-45260fe2042f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c1daa82f1b7e2909d26e662848abe7adaa29361f528ea2efac340bfd685bb155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n2mj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6b629ad4121cddae85f73b40e36013ebfd8d41e9f990dba444bdd2e51d8f9d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n2mj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:50Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wh45q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:08Z is after 2025-08-24T17:21:41Z" Sep 30 
09:50:08 crc kubenswrapper[4730]: I0930 09:50:08.028813 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"14260296-7de2-4c79-8afc-9472b7132a27\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac2d1a0a531df208cd612ed7da358209e05fb5a21f658ca2d1ab62720db094e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9d7ccc34d02e190d685a52bfa797834b585dfe919856b9af92419fa4e6e3c8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e42db724f492a056ad52bf50f43c9603664c4307a8f0c01d2d83d36f644c134c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\
\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5305f42ae8fc9005ec4726813a881f7b86ab167dd060238bbde4c9af5eb2c010\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://009056b5d735b1cd232be92cb1bfae18ea1e4e6f5b06185d424bba38ef57ade7\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T09:49:30Z\\\",\\\"message\\\":\\\"W0930 09:49:19.606005 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0930 09:49:19.606415 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759225759 cert, and key in /tmp/serving-cert-1864340106/serving-signer.crt, /tmp/serving-cert-1864340106/serving-signer.key\\\\nI0930 09:49:19.940795 1 observer_polling.go:159] Starting file observer\\\\nW0930 09:49:19.943973 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0930 09:49:19.944252 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 09:49:19.946386 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1864340106/tls.crt::/tmp/serving-cert-1864340106/tls.key\\\\\\\"\\\\nF0930 09:49:30.331930 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2da24b18038fc740ead0fe99d18f91c87014548941763cf0ef15284ed5eb228\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://674fc8de60d8bca93dc0d54d5e12664780d58b8b4c5eaf314e923c2f0644e11b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://674fc8de60d8bca93dc0d54d5e12664780d58b8b4c5eaf314e923c2f0644e11b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:16Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:08Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:08 crc kubenswrapper[4730]: I0930 09:50:08.046956 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"17c9fe87-51d8-4e94-b0aa-10f3112788c6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a687cd54db44118582a4e1b57fc64241860ae1cfdc202ffd5dccc517967e21f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://95fa75e3d43d0b4c4f3ddcb59fc3bd9fb3a75647e5fedd92463bdfaf6c3db722\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://758b57b21f203bcb20f4e3a3b20c6de8696ba56b4e0eff6a31faaf6879a3e7b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba42c83f0075e3d0f10f24307dd574c42a505351503a8794bbcee9e986ec80e3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:16Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:08Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:08 crc kubenswrapper[4730]: I0930 09:50:08.049138 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:08 crc kubenswrapper[4730]: I0930 09:50:08.049178 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:08 crc kubenswrapper[4730]: I0930 09:50:08.049188 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:08 crc kubenswrapper[4730]: I0930 09:50:08.049206 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:08 crc kubenswrapper[4730]: I0930 09:50:08.049217 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:08Z","lastTransitionTime":"2025-09-30T09:50:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:08 crc kubenswrapper[4730]: I0930 09:50:08.152875 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:08 crc kubenswrapper[4730]: I0930 09:50:08.152945 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:08 crc kubenswrapper[4730]: I0930 09:50:08.152958 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:08 crc kubenswrapper[4730]: I0930 09:50:08.152983 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:08 crc kubenswrapper[4730]: I0930 09:50:08.152997 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:08Z","lastTransitionTime":"2025-09-30T09:50:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:08 crc kubenswrapper[4730]: I0930 09:50:08.170481 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 09:50:08 crc kubenswrapper[4730]: I0930 09:50:08.170772 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 09:50:08 crc kubenswrapper[4730]: E0930 09:50:08.170824 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 09:50:40.170786284 +0000 UTC m=+84.504046277 (durationBeforeRetry 32s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 09:50:08 crc kubenswrapper[4730]: E0930 09:50:08.170887 4730 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Sep 30 09:50:08 crc kubenswrapper[4730]: I0930 09:50:08.170908 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 09:50:08 crc kubenswrapper[4730]: E0930 09:50:08.170959 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-30 09:50:40.170939448 +0000 UTC m=+84.504199631 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Sep 30 09:50:08 crc kubenswrapper[4730]: E0930 09:50:08.171126 4730 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 30 09:50:08 crc kubenswrapper[4730]: E0930 09:50:08.171259 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-30 09:50:40.171231006 +0000 UTC m=+84.504490999 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 30 09:50:08 crc kubenswrapper[4730]: I0930 09:50:08.256112 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:08 crc kubenswrapper[4730]: I0930 09:50:08.256163 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:08 crc kubenswrapper[4730]: I0930 09:50:08.256174 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:08 crc kubenswrapper[4730]: I0930 09:50:08.256194 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:08 crc kubenswrapper[4730]: I0930 09:50:08.256205 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:08Z","lastTransitionTime":"2025-09-30T09:50:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:08 crc kubenswrapper[4730]: I0930 09:50:08.271978 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 09:50:08 crc kubenswrapper[4730]: I0930 09:50:08.272054 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 09:50:08 crc kubenswrapper[4730]: E0930 09:50:08.272206 4730 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 30 09:50:08 crc kubenswrapper[4730]: E0930 09:50:08.272230 4730 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 30 09:50:08 crc kubenswrapper[4730]: E0930 09:50:08.272230 4730 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 30 09:50:08 crc kubenswrapper[4730]: E0930 09:50:08.272244 4730 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 09:50:08 crc kubenswrapper[4730]: E0930 09:50:08.272259 4730 
projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 30 09:50:08 crc kubenswrapper[4730]: E0930 09:50:08.272278 4730 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 09:50:08 crc kubenswrapper[4730]: E0930 09:50:08.272305 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-09-30 09:50:40.272287851 +0000 UTC m=+84.605547844 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 09:50:08 crc kubenswrapper[4730]: E0930 09:50:08.272332 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-09-30 09:50:40.272315532 +0000 UTC m=+84.605575535 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 09:50:08 crc kubenswrapper[4730]: I0930 09:50:08.358756 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:08 crc kubenswrapper[4730]: I0930 09:50:08.358795 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:08 crc kubenswrapper[4730]: I0930 09:50:08.358805 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:08 crc kubenswrapper[4730]: I0930 09:50:08.358821 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:08 crc kubenswrapper[4730]: I0930 09:50:08.358830 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:08Z","lastTransitionTime":"2025-09-30T09:50:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:08 crc kubenswrapper[4730]: I0930 09:50:08.380216 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 09:50:08 crc kubenswrapper[4730]: E0930 09:50:08.380370 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 09:50:08 crc kubenswrapper[4730]: I0930 09:50:08.380795 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 09:50:08 crc kubenswrapper[4730]: E0930 09:50:08.380870 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 09:50:08 crc kubenswrapper[4730]: I0930 09:50:08.381037 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 09:50:08 crc kubenswrapper[4730]: E0930 09:50:08.381130 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 09:50:08 crc kubenswrapper[4730]: I0930 09:50:08.462164 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:08 crc kubenswrapper[4730]: I0930 09:50:08.462208 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:08 crc kubenswrapper[4730]: I0930 09:50:08.462221 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:08 crc kubenswrapper[4730]: I0930 09:50:08.462242 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:08 crc kubenswrapper[4730]: I0930 09:50:08.462255 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:08Z","lastTransitionTime":"2025-09-30T09:50:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:08 crc kubenswrapper[4730]: I0930 09:50:08.565036 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:08 crc kubenswrapper[4730]: I0930 09:50:08.565086 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:08 crc kubenswrapper[4730]: I0930 09:50:08.565099 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:08 crc kubenswrapper[4730]: I0930 09:50:08.565117 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:08 crc kubenswrapper[4730]: I0930 09:50:08.565131 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:08Z","lastTransitionTime":"2025-09-30T09:50:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:08 crc kubenswrapper[4730]: I0930 09:50:08.667876 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:08 crc kubenswrapper[4730]: I0930 09:50:08.667933 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:08 crc kubenswrapper[4730]: I0930 09:50:08.667957 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:08 crc kubenswrapper[4730]: I0930 09:50:08.667978 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:08 crc kubenswrapper[4730]: I0930 09:50:08.667996 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:08Z","lastTransitionTime":"2025-09-30T09:50:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:08 crc kubenswrapper[4730]: I0930 09:50:08.753132 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-c5vmh_823c4c28-801d-421e-b15f-02a17e300753/ovnkube-controller/2.log" Sep 30 09:50:08 crc kubenswrapper[4730]: I0930 09:50:08.758505 4730 scope.go:117] "RemoveContainer" containerID="920e864b3b47f25ccbf4d23e865008517a888ed00df9d5e474a1d43ceac0faf1" Sep 30 09:50:08 crc kubenswrapper[4730]: E0930 09:50:08.758759 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-c5vmh_openshift-ovn-kubernetes(823c4c28-801d-421e-b15f-02a17e300753)\"" pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" podUID="823c4c28-801d-421e-b15f-02a17e300753" Sep 30 09:50:08 crc kubenswrapper[4730]: I0930 09:50:08.773039 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:08 crc kubenswrapper[4730]: I0930 09:50:08.773106 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:08 crc kubenswrapper[4730]: I0930 09:50:08.773119 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:08 crc kubenswrapper[4730]: I0930 09:50:08.773138 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:08 crc kubenswrapper[4730]: I0930 09:50:08.773166 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:08Z","lastTransitionTime":"2025-09-30T09:50:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:08 crc kubenswrapper[4730]: I0930 09:50:08.776800 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"17c9fe87-51d8-4e94-b0aa-10f3112788c6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a687cd54db44118582a4e1b57fc64241860ae1cfdc202ffd5dccc517967e21f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://95fa75e3d43d0b4c4f3ddcb59fc3bd9fb3a75647e5fedd92463bdfaf6c3db722\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://758b57b21f203bcb20f4e3a3b20c6de8696ba56b4e0eff6a31faaf6879a3e7b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba42c83f0075e3d0f10f24307dd574c42a505351503a8794bbcee9e986ec80e3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:16Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:08Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:08 crc kubenswrapper[4730]: I0930 09:50:08.792052 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1917c40c0ff4b4b6a2389c465a4887bad0db0b5b86a8715e0c790b3e0acfb973\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for 
pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:08Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:08 crc kubenswrapper[4730]: I0930 09:50:08.808769 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e32477c6ee071d9e994825da575711cc75f177c998dd5fd170be07551b40e049\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:08Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:08 crc kubenswrapper[4730]: I0930 09:50:08.822916 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"95bd4436-8399-478d-9552-c9ba5ae8f327\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b7761b460113a85528a11a7475d5245ce3009a0dd413a8deec822c6c0eac07b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9d975\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://defac85c6bf34a3ea262b1ad293516b72b27c3f3e328f3f970694bb978bf90a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9d975\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d4zf9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:08Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:08 crc kubenswrapper[4730]: I0930 09:50:08.834692 4730 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-nw55k" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c0a99445-b60e-4c47-b4ae-00b9983d8a15\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b566e571569572866ddcfc76684ec3f1fdabc0f890aa5fd8588d136b890e88ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qdpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:40Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nw55k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:08Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:08 crc kubenswrapper[4730]: I0930 09:50:08.847790 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wh45q" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c534fd9f-3767-4be6-a84e-45260fe2042f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c1daa82f1b7e2909d26e662848abe7adaa29361f528ea2efac340bfd685bb155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n2mj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6b629ad4121cddae85f73b40e36013ebfd8d41e9f990dba444bdd2e51d8f9d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n2mj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:50Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wh45q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:08Z is after 2025-08-24T17:21:41Z" Sep 30 
09:50:08 crc kubenswrapper[4730]: I0930 09:50:08.880151 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:08 crc kubenswrapper[4730]: I0930 09:50:08.880196 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:08 crc kubenswrapper[4730]: I0930 09:50:08.880205 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:08 crc kubenswrapper[4730]: I0930 09:50:08.880221 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:08 crc kubenswrapper[4730]: I0930 09:50:08.880232 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:08Z","lastTransitionTime":"2025-09-30T09:50:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:08 crc kubenswrapper[4730]: I0930 09:50:08.889229 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"14260296-7de2-4c79-8afc-9472b7132a27\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac2d1a0a531df208cd612ed7da358209e05fb5a21f658ca2d1ab62720db094e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9d7ccc34d02e190d685a52bfa797834b585dfe919856b9af92419fa4e6e3c8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d
7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e42db724f492a056ad52bf50f43c9603664c4307a8f0c01d2d83d36f644c134c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5305f42ae8fc9005ec4726813a881f7b86ab167dd060238bbde4c9af5eb2c010\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://009056b5d735b1cd232be92cb1bfae18ea1e4e6f5b06185d424bba38ef57ade7\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T09:49:30Z\\\",\\\"message\\\":\\\"W0930 09:49:19.606005 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0930 09:49:19.606415 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759225759 cert, and key in /tmp/serving-cert-1864340106/serving-signer.crt, /tmp/serving-cert-1864340106/serving-signer.key\\\\nI0930 09:49:19.940795 1 observer_polling.go:159] Starting file observer\\\\nW0930 09:49:19.943973 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0930 09:49:19.944252 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 09:49:19.946386 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1864340106/tls.crt::/tmp/serving-cert-1864340106/tls.key\\\\\\\"\\\\nF0930 09:49:30.331930 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2da24b18038fc740ead0fe99d18f91c87014548941763cf0ef15284ed5eb228\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://674fc8de60d8bca93dc0d54d5e12664780d58b8b4c5eaf314e923c2f0644e11b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://674fc8de60d8bca93dc0d54d5e12664780d58b8b4c5eaf314e923c2f0644e11b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:16Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:08Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:08 crc kubenswrapper[4730]: I0930 09:50:08.907718 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s64nf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a7e6b85-ac68-4da9-b7eb-b5a936f639df\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a77da47c8d44b3aae7f4e27fcc4df190549c2d881e0969dbe3b8951103477ba3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6rknv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s64nf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:08Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:08 crc kubenswrapper[4730]: I0930 09:50:08.924219 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:08Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:08 crc kubenswrapper[4730]: I0930 09:50:08.940397 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:08Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:08 crc kubenswrapper[4730]: I0930 09:50:08.955996 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-t2frc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"98a6f8df-1ac8-4652-8074-90cb180311ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca62256374b9da6cb7b82db90ee6d121b072440c000bc7dfb04e763224cf666d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\
"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-txcfr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-t2frc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:08Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:08 crc kubenswrapper[4730]: I0930 09:50:08.976860 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"823c4c28-801d-421e-b15f-02a17e300753\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://566f950ac85f82874c332e01386898784e3b3e37ee3c422f8b73f6e732cd4541\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f988662b16857e74e7c88eb9e69279e7b5807aa2831cdd6ed9a7cab0a6b2494\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d99f5927ded358669fa88d47fb6a8d8bad2808bfdd07920056227828e478eda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fbe6a18bf72a902960dfc551de099c3956696ca5ac78ff5afc30a93768be323\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd7abb6daba8cf38e3d8cc62ab526acb2fe714e07477eb5d1ecf77e145cf60dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d525f8baccabf7bc3b8067dbbc59d76851878ccc48a9fbe61c637b6fe57e01d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://920e864b3b47f25ccbf4d23e865008517a888ed0
0df9d5e474a1d43ceac0faf1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://920e864b3b47f25ccbf4d23e865008517a888ed00df9d5e474a1d43ceac0faf1\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T09:50:07Z\\\",\\\"message\\\":\\\"/multus-additional-cni-plugins-p4xvk\\\\nF0930 09:50:07.486769 6349 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:07Z is after 2025-08-24T17:21:41Z]\\\\nI0930 09:50:07.486778 6349 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-console/networking-console-plugin-85b44fc459-gdk6g\\\\nI0930 09:50:07.486723 6349 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-machine-config-operator/machine-config-operator]} name:Service_openshift-machine-config-operator/machine-config-operator_TCP_cluster o\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T09:50:06Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-c5vmh_openshift-ovn-kubernetes(823c4c28-801d-421e-b15f-02a17e300753)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59538b49ec4037e4bd1ccb035a0263f32db2504037aca2ab484dc9323f80b5d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4fcd3f6fd645e486e741ae4ddea7a8669c849d729860c651131da25638a393f\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4fcd3f6fd645e486e741ae4ddea7a8669c849d729860c651131da25638a393f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-c5vmh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:08Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:08 crc kubenswrapper[4730]: I0930 09:50:08.982377 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:08 crc kubenswrapper[4730]: I0930 09:50:08.982414 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:08 crc kubenswrapper[4730]: I0930 09:50:08.982423 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:08 crc kubenswrapper[4730]: I0930 09:50:08.982439 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:08 crc kubenswrapper[4730]: I0930 09:50:08.982448 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:08Z","lastTransitionTime":"2025-09-30T09:50:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:08 crc kubenswrapper[4730]: I0930 09:50:08.990276 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-dqqrb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be86a67e-c663-4551-9ecf-a8c2a9801cd7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9sm8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9sm8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:51Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-dqqrb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:08Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:09 crc kubenswrapper[4730]: I0930 09:50:09.006245 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d4482712-8a6c-4e35-8e85-0777588ab827\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d84dba8205ee9f4469b0277711b38ff82a1068b3f1cd951ea4a4eefab51ba88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://58b48296b6764c18fc1b5163c1ef503b124b97f11f287fbbbc5b1144ecd06bbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://461de41b45c19fbaf8e606cd30458759972ca804bac7e4cd5495efd8f30280fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0abf086d2b6dabfe2b74ed6b31b94bbd61222698aeb420775118dcca92243bbd\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0abf086d2b6dabfe2b74ed6b31b94bbd61222698aeb420775118dcca92243bbd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:17Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:16Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:09Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:09 crc kubenswrapper[4730]: I0930 09:50:09.023302 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:09Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:09 crc kubenswrapper[4730]: I0930 09:50:09.039043 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://96ab57c0ead54e1fe3bbf9a7fe3fc8375d1e5ebd29840f01bb2022de2f3bfdc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://373aa4d8084c958728b4a3f16056068fb328a59e55a5f50785647d78fac0e424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:09Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:09 crc kubenswrapper[4730]: I0930 09:50:09.056026 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-p4xvk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9871bed2-69f9-44f1-ab80-f8b4b9241e73\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19b882d14f4eaee6c6ad8cdd30c6e1e63eb837c7bc2dd31974c37ffbc3307186\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84aa64a838e41f5db6010538528a9d8d9221b5fe859e1afec9d9d3c71ef90eb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://84aa64a838e41f5db6010538528a9d8d9221b5fe859e1afec9d9d3c71ef90eb7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"start
edAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://618950d5ff8ec1ef34dc0f43e73a265aefbeb97f69a25226fb2007b4d28249a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://618950d5ff8ec1ef34dc0f43e73a265aefbeb97f69a25226fb2007b4d28249a7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://849eb24b3a7e62a7667c303069953f1391cdebf8f2888dc7d3967667c31c2a3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://849eb24b3a7e62a7667c303069953f1391cdebf8f2888dc7d3967667c31c2a3e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://685162f0c76d93c0aa78dc2c5c0ee794627f665afa60af9d9d0ae3ba8f5f7b59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\"
:\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://685162f0c76d93c0aa78dc2c5c0ee794627f665afa60af9d9d0ae3ba8f5f7b59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c92fbb212b1ad8be488e639f7f88263b3d96ab0320da532d908b3c9df59ef2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c92fbb212b1ad8be488e639f7f88263b3d96ab0320da532d908b3c9df59ef2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://840f2a769ccdfba8933c685d07c325cf84d38bccf1d317c3b664f298aa52d878\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://840f2a769ccdfba8933c685d07c325cf84d38bccf1d317c3b664f298aa52d878\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.1
68.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-p4xvk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:09Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:09 crc kubenswrapper[4730]: I0930 09:50:09.091274 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:09 crc kubenswrapper[4730]: I0930 09:50:09.091312 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:09 crc kubenswrapper[4730]: I0930 09:50:09.091322 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:09 crc kubenswrapper[4730]: I0930 09:50:09.091338 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:09 crc kubenswrapper[4730]: I0930 09:50:09.091347 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:09Z","lastTransitionTime":"2025-09-30T09:50:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:09 crc kubenswrapper[4730]: I0930 09:50:09.194144 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:09 crc kubenswrapper[4730]: I0930 09:50:09.194197 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:09 crc kubenswrapper[4730]: I0930 09:50:09.194207 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:09 crc kubenswrapper[4730]: I0930 09:50:09.194221 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:09 crc kubenswrapper[4730]: I0930 09:50:09.194231 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:09Z","lastTransitionTime":"2025-09-30T09:50:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:09 crc kubenswrapper[4730]: I0930 09:50:09.296943 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:09 crc kubenswrapper[4730]: I0930 09:50:09.297030 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:09 crc kubenswrapper[4730]: I0930 09:50:09.297055 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:09 crc kubenswrapper[4730]: I0930 09:50:09.297125 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:09 crc kubenswrapper[4730]: I0930 09:50:09.297151 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:09Z","lastTransitionTime":"2025-09-30T09:50:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:09 crc kubenswrapper[4730]: I0930 09:50:09.380463 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-dqqrb" Sep 30 09:50:09 crc kubenswrapper[4730]: E0930 09:50:09.380658 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-dqqrb" podUID="be86a67e-c663-4551-9ecf-a8c2a9801cd7" Sep 30 09:50:09 crc kubenswrapper[4730]: I0930 09:50:09.401008 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:09 crc kubenswrapper[4730]: I0930 09:50:09.401047 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:09 crc kubenswrapper[4730]: I0930 09:50:09.401057 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:09 crc kubenswrapper[4730]: I0930 09:50:09.401073 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:09 crc kubenswrapper[4730]: I0930 09:50:09.401082 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:09Z","lastTransitionTime":"2025-09-30T09:50:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:09 crc kubenswrapper[4730]: I0930 09:50:09.503736 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:09 crc kubenswrapper[4730]: I0930 09:50:09.503801 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:09 crc kubenswrapper[4730]: I0930 09:50:09.503814 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:09 crc kubenswrapper[4730]: I0930 09:50:09.503835 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:09 crc kubenswrapper[4730]: I0930 09:50:09.503852 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:09Z","lastTransitionTime":"2025-09-30T09:50:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:09 crc kubenswrapper[4730]: I0930 09:50:09.606788 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:09 crc kubenswrapper[4730]: I0930 09:50:09.606906 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:09 crc kubenswrapper[4730]: I0930 09:50:09.606917 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:09 crc kubenswrapper[4730]: I0930 09:50:09.606934 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:09 crc kubenswrapper[4730]: I0930 09:50:09.606946 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:09Z","lastTransitionTime":"2025-09-30T09:50:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:09 crc kubenswrapper[4730]: I0930 09:50:09.709943 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:09 crc kubenswrapper[4730]: I0930 09:50:09.709997 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:09 crc kubenswrapper[4730]: I0930 09:50:09.710009 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:09 crc kubenswrapper[4730]: I0930 09:50:09.710026 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:09 crc kubenswrapper[4730]: I0930 09:50:09.710038 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:09Z","lastTransitionTime":"2025-09-30T09:50:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:09 crc kubenswrapper[4730]: I0930 09:50:09.813317 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:09 crc kubenswrapper[4730]: I0930 09:50:09.813384 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:09 crc kubenswrapper[4730]: I0930 09:50:09.813403 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:09 crc kubenswrapper[4730]: I0930 09:50:09.813431 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:09 crc kubenswrapper[4730]: I0930 09:50:09.813449 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:09Z","lastTransitionTime":"2025-09-30T09:50:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:09 crc kubenswrapper[4730]: I0930 09:50:09.916551 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:09 crc kubenswrapper[4730]: I0930 09:50:09.916609 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:09 crc kubenswrapper[4730]: I0930 09:50:09.916642 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:09 crc kubenswrapper[4730]: I0930 09:50:09.916666 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:09 crc kubenswrapper[4730]: I0930 09:50:09.916680 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:09Z","lastTransitionTime":"2025-09-30T09:50:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:10 crc kubenswrapper[4730]: I0930 09:50:10.019938 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:10 crc kubenswrapper[4730]: I0930 09:50:10.019992 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:10 crc kubenswrapper[4730]: I0930 09:50:10.020006 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:10 crc kubenswrapper[4730]: I0930 09:50:10.020025 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:10 crc kubenswrapper[4730]: I0930 09:50:10.020040 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:10Z","lastTransitionTime":"2025-09-30T09:50:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:10 crc kubenswrapper[4730]: I0930 09:50:10.123358 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:10 crc kubenswrapper[4730]: I0930 09:50:10.123458 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:10 crc kubenswrapper[4730]: I0930 09:50:10.123480 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:10 crc kubenswrapper[4730]: I0930 09:50:10.123507 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:10 crc kubenswrapper[4730]: I0930 09:50:10.123519 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:10Z","lastTransitionTime":"2025-09-30T09:50:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:10 crc kubenswrapper[4730]: I0930 09:50:10.227106 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:10 crc kubenswrapper[4730]: I0930 09:50:10.227162 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:10 crc kubenswrapper[4730]: I0930 09:50:10.227175 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:10 crc kubenswrapper[4730]: I0930 09:50:10.227196 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:10 crc kubenswrapper[4730]: I0930 09:50:10.227208 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:10Z","lastTransitionTime":"2025-09-30T09:50:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:10 crc kubenswrapper[4730]: I0930 09:50:10.330050 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:10 crc kubenswrapper[4730]: I0930 09:50:10.330106 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:10 crc kubenswrapper[4730]: I0930 09:50:10.330117 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:10 crc kubenswrapper[4730]: I0930 09:50:10.330136 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:10 crc kubenswrapper[4730]: I0930 09:50:10.330149 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:10Z","lastTransitionTime":"2025-09-30T09:50:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:10 crc kubenswrapper[4730]: I0930 09:50:10.380957 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 09:50:10 crc kubenswrapper[4730]: I0930 09:50:10.380975 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 09:50:10 crc kubenswrapper[4730]: E0930 09:50:10.381191 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 09:50:10 crc kubenswrapper[4730]: E0930 09:50:10.381368 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 09:50:10 crc kubenswrapper[4730]: I0930 09:50:10.381492 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 09:50:10 crc kubenswrapper[4730]: E0930 09:50:10.381750 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 09:50:10 crc kubenswrapper[4730]: I0930 09:50:10.433812 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:10 crc kubenswrapper[4730]: I0930 09:50:10.433879 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:10 crc kubenswrapper[4730]: I0930 09:50:10.433897 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:10 crc kubenswrapper[4730]: I0930 09:50:10.433922 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:10 crc kubenswrapper[4730]: I0930 09:50:10.433939 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:10Z","lastTransitionTime":"2025-09-30T09:50:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:10 crc kubenswrapper[4730]: I0930 09:50:10.536499 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:10 crc kubenswrapper[4730]: I0930 09:50:10.536795 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:10 crc kubenswrapper[4730]: I0930 09:50:10.536812 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:10 crc kubenswrapper[4730]: I0930 09:50:10.536828 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:10 crc kubenswrapper[4730]: I0930 09:50:10.536840 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:10Z","lastTransitionTime":"2025-09-30T09:50:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:10 crc kubenswrapper[4730]: I0930 09:50:10.639938 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:10 crc kubenswrapper[4730]: I0930 09:50:10.639991 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:10 crc kubenswrapper[4730]: I0930 09:50:10.640005 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:10 crc kubenswrapper[4730]: I0930 09:50:10.640029 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:10 crc kubenswrapper[4730]: I0930 09:50:10.640044 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:10Z","lastTransitionTime":"2025-09-30T09:50:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:10 crc kubenswrapper[4730]: I0930 09:50:10.742984 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:10 crc kubenswrapper[4730]: I0930 09:50:10.743062 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:10 crc kubenswrapper[4730]: I0930 09:50:10.743085 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:10 crc kubenswrapper[4730]: I0930 09:50:10.743113 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:10 crc kubenswrapper[4730]: I0930 09:50:10.743129 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:10Z","lastTransitionTime":"2025-09-30T09:50:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:10 crc kubenswrapper[4730]: I0930 09:50:10.846457 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:10 crc kubenswrapper[4730]: I0930 09:50:10.846517 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:10 crc kubenswrapper[4730]: I0930 09:50:10.846535 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:10 crc kubenswrapper[4730]: I0930 09:50:10.846561 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:10 crc kubenswrapper[4730]: I0930 09:50:10.846576 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:10Z","lastTransitionTime":"2025-09-30T09:50:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:10 crc kubenswrapper[4730]: I0930 09:50:10.950915 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:10 crc kubenswrapper[4730]: I0930 09:50:10.950977 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:10 crc kubenswrapper[4730]: I0930 09:50:10.950992 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:10 crc kubenswrapper[4730]: I0930 09:50:10.951018 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:10 crc kubenswrapper[4730]: I0930 09:50:10.951038 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:10Z","lastTransitionTime":"2025-09-30T09:50:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:11 crc kubenswrapper[4730]: I0930 09:50:11.054375 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:11 crc kubenswrapper[4730]: I0930 09:50:11.054424 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:11 crc kubenswrapper[4730]: I0930 09:50:11.054437 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:11 crc kubenswrapper[4730]: I0930 09:50:11.054458 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:11 crc kubenswrapper[4730]: I0930 09:50:11.054471 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:11Z","lastTransitionTime":"2025-09-30T09:50:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:11 crc kubenswrapper[4730]: I0930 09:50:11.157395 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:11 crc kubenswrapper[4730]: I0930 09:50:11.157442 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:11 crc kubenswrapper[4730]: I0930 09:50:11.157457 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:11 crc kubenswrapper[4730]: I0930 09:50:11.157477 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:11 crc kubenswrapper[4730]: I0930 09:50:11.157494 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:11Z","lastTransitionTime":"2025-09-30T09:50:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:11 crc kubenswrapper[4730]: I0930 09:50:11.260301 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:11 crc kubenswrapper[4730]: I0930 09:50:11.260610 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:11 crc kubenswrapper[4730]: I0930 09:50:11.260763 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:11 crc kubenswrapper[4730]: I0930 09:50:11.260862 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:11 crc kubenswrapper[4730]: I0930 09:50:11.260942 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:11Z","lastTransitionTime":"2025-09-30T09:50:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:11 crc kubenswrapper[4730]: I0930 09:50:11.364505 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:11 crc kubenswrapper[4730]: I0930 09:50:11.364561 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:11 crc kubenswrapper[4730]: I0930 09:50:11.364574 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:11 crc kubenswrapper[4730]: I0930 09:50:11.364595 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:11 crc kubenswrapper[4730]: I0930 09:50:11.364608 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:11Z","lastTransitionTime":"2025-09-30T09:50:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:11 crc kubenswrapper[4730]: I0930 09:50:11.380884 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-dqqrb" Sep 30 09:50:11 crc kubenswrapper[4730]: E0930 09:50:11.381081 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-dqqrb" podUID="be86a67e-c663-4551-9ecf-a8c2a9801cd7" Sep 30 09:50:11 crc kubenswrapper[4730]: I0930 09:50:11.468834 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:11 crc kubenswrapper[4730]: I0930 09:50:11.468909 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:11 crc kubenswrapper[4730]: I0930 09:50:11.468953 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:11 crc kubenswrapper[4730]: I0930 09:50:11.468979 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:11 crc kubenswrapper[4730]: I0930 09:50:11.468993 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:11Z","lastTransitionTime":"2025-09-30T09:50:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:11 crc kubenswrapper[4730]: I0930 09:50:11.572552 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:11 crc kubenswrapper[4730]: I0930 09:50:11.573438 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:11 crc kubenswrapper[4730]: I0930 09:50:11.573492 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:11 crc kubenswrapper[4730]: I0930 09:50:11.573519 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:11 crc kubenswrapper[4730]: I0930 09:50:11.573534 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:11Z","lastTransitionTime":"2025-09-30T09:50:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:11 crc kubenswrapper[4730]: I0930 09:50:11.677415 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:11 crc kubenswrapper[4730]: I0930 09:50:11.677466 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:11 crc kubenswrapper[4730]: I0930 09:50:11.677475 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:11 crc kubenswrapper[4730]: I0930 09:50:11.677493 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:11 crc kubenswrapper[4730]: I0930 09:50:11.677503 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:11Z","lastTransitionTime":"2025-09-30T09:50:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:11 crc kubenswrapper[4730]: I0930 09:50:11.780839 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:11 crc kubenswrapper[4730]: I0930 09:50:11.780931 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:11 crc kubenswrapper[4730]: I0930 09:50:11.780944 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:11 crc kubenswrapper[4730]: I0930 09:50:11.780966 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:11 crc kubenswrapper[4730]: I0930 09:50:11.780980 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:11Z","lastTransitionTime":"2025-09-30T09:50:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:11 crc kubenswrapper[4730]: I0930 09:50:11.884329 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:11 crc kubenswrapper[4730]: I0930 09:50:11.884372 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:11 crc kubenswrapper[4730]: I0930 09:50:11.884385 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:11 crc kubenswrapper[4730]: I0930 09:50:11.884403 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:11 crc kubenswrapper[4730]: I0930 09:50:11.884412 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:11Z","lastTransitionTime":"2025-09-30T09:50:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:11 crc kubenswrapper[4730]: I0930 09:50:11.987109 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:11 crc kubenswrapper[4730]: I0930 09:50:11.987234 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:11 crc kubenswrapper[4730]: I0930 09:50:11.987261 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:11 crc kubenswrapper[4730]: I0930 09:50:11.987297 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:11 crc kubenswrapper[4730]: I0930 09:50:11.987327 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:11Z","lastTransitionTime":"2025-09-30T09:50:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:12 crc kubenswrapper[4730]: I0930 09:50:12.090574 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:12 crc kubenswrapper[4730]: I0930 09:50:12.090650 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:12 crc kubenswrapper[4730]: I0930 09:50:12.090662 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:12 crc kubenswrapper[4730]: I0930 09:50:12.090688 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:12 crc kubenswrapper[4730]: I0930 09:50:12.090705 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:12Z","lastTransitionTime":"2025-09-30T09:50:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:12 crc kubenswrapper[4730]: I0930 09:50:12.193820 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:12 crc kubenswrapper[4730]: I0930 09:50:12.193871 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:12 crc kubenswrapper[4730]: I0930 09:50:12.193884 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:12 crc kubenswrapper[4730]: I0930 09:50:12.193903 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:12 crc kubenswrapper[4730]: I0930 09:50:12.193916 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:12Z","lastTransitionTime":"2025-09-30T09:50:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:12 crc kubenswrapper[4730]: I0930 09:50:12.297160 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:12 crc kubenswrapper[4730]: I0930 09:50:12.297200 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:12 crc kubenswrapper[4730]: I0930 09:50:12.297212 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:12 crc kubenswrapper[4730]: I0930 09:50:12.297233 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:12 crc kubenswrapper[4730]: I0930 09:50:12.297248 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:12Z","lastTransitionTime":"2025-09-30T09:50:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:12 crc kubenswrapper[4730]: I0930 09:50:12.380567 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 09:50:12 crc kubenswrapper[4730]: I0930 09:50:12.380567 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 09:50:12 crc kubenswrapper[4730]: E0930 09:50:12.380786 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 09:50:12 crc kubenswrapper[4730]: I0930 09:50:12.380854 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 09:50:12 crc kubenswrapper[4730]: E0930 09:50:12.381030 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 09:50:12 crc kubenswrapper[4730]: E0930 09:50:12.381202 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 09:50:12 crc kubenswrapper[4730]: I0930 09:50:12.400583 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:12 crc kubenswrapper[4730]: I0930 09:50:12.400662 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:12 crc kubenswrapper[4730]: I0930 09:50:12.400672 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:12 crc kubenswrapper[4730]: I0930 09:50:12.400689 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:12 crc kubenswrapper[4730]: I0930 09:50:12.400699 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:12Z","lastTransitionTime":"2025-09-30T09:50:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:12 crc kubenswrapper[4730]: I0930 09:50:12.504205 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:12 crc kubenswrapper[4730]: I0930 09:50:12.504267 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:12 crc kubenswrapper[4730]: I0930 09:50:12.504282 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:12 crc kubenswrapper[4730]: I0930 09:50:12.504301 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:12 crc kubenswrapper[4730]: I0930 09:50:12.504317 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:12Z","lastTransitionTime":"2025-09-30T09:50:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:12 crc kubenswrapper[4730]: I0930 09:50:12.607552 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:12 crc kubenswrapper[4730]: I0930 09:50:12.607602 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:12 crc kubenswrapper[4730]: I0930 09:50:12.607640 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:12 crc kubenswrapper[4730]: I0930 09:50:12.607664 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:12 crc kubenswrapper[4730]: I0930 09:50:12.607676 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:12Z","lastTransitionTime":"2025-09-30T09:50:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:12 crc kubenswrapper[4730]: I0930 09:50:12.711246 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:12 crc kubenswrapper[4730]: I0930 09:50:12.711324 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:12 crc kubenswrapper[4730]: I0930 09:50:12.711364 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:12 crc kubenswrapper[4730]: I0930 09:50:12.711403 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:12 crc kubenswrapper[4730]: I0930 09:50:12.711429 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:12Z","lastTransitionTime":"2025-09-30T09:50:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:12 crc kubenswrapper[4730]: I0930 09:50:12.814579 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:12 crc kubenswrapper[4730]: I0930 09:50:12.814646 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:12 crc kubenswrapper[4730]: I0930 09:50:12.814663 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:12 crc kubenswrapper[4730]: I0930 09:50:12.814682 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:12 crc kubenswrapper[4730]: I0930 09:50:12.814696 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:12Z","lastTransitionTime":"2025-09-30T09:50:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:12 crc kubenswrapper[4730]: I0930 09:50:12.918082 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:12 crc kubenswrapper[4730]: I0930 09:50:12.918146 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:12 crc kubenswrapper[4730]: I0930 09:50:12.918156 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:12 crc kubenswrapper[4730]: I0930 09:50:12.918174 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:12 crc kubenswrapper[4730]: I0930 09:50:12.918187 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:12Z","lastTransitionTime":"2025-09-30T09:50:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:13 crc kubenswrapper[4730]: I0930 09:50:13.021008 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:13 crc kubenswrapper[4730]: I0930 09:50:13.021067 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:13 crc kubenswrapper[4730]: I0930 09:50:13.021083 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:13 crc kubenswrapper[4730]: I0930 09:50:13.021101 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:13 crc kubenswrapper[4730]: I0930 09:50:13.021113 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:13Z","lastTransitionTime":"2025-09-30T09:50:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:13 crc kubenswrapper[4730]: I0930 09:50:13.124166 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:13 crc kubenswrapper[4730]: I0930 09:50:13.124215 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:13 crc kubenswrapper[4730]: I0930 09:50:13.124225 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:13 crc kubenswrapper[4730]: I0930 09:50:13.124242 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:13 crc kubenswrapper[4730]: I0930 09:50:13.124251 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:13Z","lastTransitionTime":"2025-09-30T09:50:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:13 crc kubenswrapper[4730]: I0930 09:50:13.226449 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:13 crc kubenswrapper[4730]: I0930 09:50:13.226500 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:13 crc kubenswrapper[4730]: I0930 09:50:13.226512 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:13 crc kubenswrapper[4730]: I0930 09:50:13.226531 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:13 crc kubenswrapper[4730]: I0930 09:50:13.226543 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:13Z","lastTransitionTime":"2025-09-30T09:50:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:13 crc kubenswrapper[4730]: I0930 09:50:13.329881 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:13 crc kubenswrapper[4730]: I0930 09:50:13.329951 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:13 crc kubenswrapper[4730]: I0930 09:50:13.329970 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:13 crc kubenswrapper[4730]: I0930 09:50:13.329997 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:13 crc kubenswrapper[4730]: I0930 09:50:13.330015 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:13Z","lastTransitionTime":"2025-09-30T09:50:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:13 crc kubenswrapper[4730]: I0930 09:50:13.379848 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-dqqrb" Sep 30 09:50:13 crc kubenswrapper[4730]: E0930 09:50:13.380068 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-dqqrb" podUID="be86a67e-c663-4551-9ecf-a8c2a9801cd7" Sep 30 09:50:13 crc kubenswrapper[4730]: I0930 09:50:13.433266 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:13 crc kubenswrapper[4730]: I0930 09:50:13.433336 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:13 crc kubenswrapper[4730]: I0930 09:50:13.433351 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:13 crc kubenswrapper[4730]: I0930 09:50:13.433372 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:13 crc kubenswrapper[4730]: I0930 09:50:13.433386 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:13Z","lastTransitionTime":"2025-09-30T09:50:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:13 crc kubenswrapper[4730]: I0930 09:50:13.457435 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:13 crc kubenswrapper[4730]: I0930 09:50:13.457527 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:13 crc kubenswrapper[4730]: I0930 09:50:13.457550 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:13 crc kubenswrapper[4730]: I0930 09:50:13.457580 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:13 crc kubenswrapper[4730]: I0930 09:50:13.457670 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:13Z","lastTransitionTime":"2025-09-30T09:50:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Sep 30 09:50:13 crc kubenswrapper[4730]: E0930 09:50:13.473257 4730 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:50:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:50:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:13Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:50:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:50:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:13Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"106859cf-ef10-430a-91cd-145c67df2de1\\\",\\\"systemUUID\\\":\\\"07b44d08-082f-49ea-b265-a8fb7a484875\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:13Z is after 2025-08-24T17:21:41Z"
Sep 30 09:50:13 crc kubenswrapper[4730]: I0930 09:50:13.482170 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 09:50:13 crc kubenswrapper[4730]: I0930 09:50:13.482230 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
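The patch itself is well-formed; it is rejected because the node.network-node-identity.openshift.io admission webhook at 127.0.0.1:9743 is serving a certificate that expired on 2025-08-24T17:21:41Z. A short Go sketch that dials the endpoint and prints the certificate's validity window, so the expiry reported in the log can be confirmed from the host; verification is skipped on purpose, since an expired certificate would otherwise abort the handshake before it can be inspected:

```go
package main

import (
	"crypto/tls"
	"fmt"
	"time"
)

func main() {
	// The webhook endpoint from the kubelet error above.
	conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{
		// Skipped deliberately: the goal is to read an expired certificate,
		// not to authenticate the peer.
		InsecureSkipVerify: true,
	})
	if err != nil {
		fmt.Println("dial failed:", err)
		return
	}
	defer conn.Close()

	cert := conn.ConnectionState().PeerCertificates[0]
	fmt.Println("subject:  ", cert.Subject)
	fmt.Println("notBefore:", cert.NotBefore.Format(time.RFC3339))
	fmt.Println("notAfter: ", cert.NotAfter.Format(time.RFC3339))
	fmt.Println("expired:  ", time.Now().After(cert.NotAfter))
}
```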
event="NodeHasNoDiskPressure" Sep 30 09:50:13 crc kubenswrapper[4730]: I0930 09:50:13.482243 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:13 crc kubenswrapper[4730]: I0930 09:50:13.482263 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:13 crc kubenswrapper[4730]: I0930 09:50:13.482276 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:13Z","lastTransitionTime":"2025-09-30T09:50:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:13 crc kubenswrapper[4730]: E0930 09:50:13.500677 4730 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:50:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:50:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:13Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:50:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:50:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:13Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"106859cf-ef10-430a-91cd-145c67df2de1\\\",\\\"systemUUID\\\":\\\"07b44d08-082f-49ea-b265-a8fb7a484875\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:13Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:13 crc kubenswrapper[4730]: I0930 09:50:13.506042 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:13 crc kubenswrapper[4730]: I0930 09:50:13.506087 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 09:50:13 crc kubenswrapper[4730]: I0930 09:50:13.506098 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:13 crc kubenswrapper[4730]: I0930 09:50:13.506115 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:13 crc kubenswrapper[4730]: I0930 09:50:13.506125 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:13Z","lastTransitionTime":"2025-09-30T09:50:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:13 crc kubenswrapper[4730]: E0930 09:50:13.520356 4730 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:50:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:50:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:13Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:50:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:50:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:13Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"106859cf-ef10-430a-91cd-145c67df2de1\\\",\\\"systemUUID\\\":\\\"07b44d08-082f-49ea-b265-a8fb7a484875\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:13Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:13 crc kubenswrapper[4730]: I0930 09:50:13.525758 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:13 crc kubenswrapper[4730]: I0930 09:50:13.525812 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 09:50:13 crc kubenswrapper[4730]: I0930 09:50:13.525824 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:13 crc kubenswrapper[4730]: I0930 09:50:13.525843 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:13 crc kubenswrapper[4730]: I0930 09:50:13.525854 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:13Z","lastTransitionTime":"2025-09-30T09:50:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:13 crc kubenswrapper[4730]: E0930 09:50:13.540031 4730 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:50:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:50:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:13Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:50:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:50:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:13Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"106859cf-ef10-430a-91cd-145c67df2de1\\\",\\\"systemUUID\\\":\\\"07b44d08-082f-49ea-b265-a8fb7a484875\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:13Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:13 crc kubenswrapper[4730]: I0930 09:50:13.544436 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:13 crc kubenswrapper[4730]: I0930 09:50:13.544479 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 09:50:13 crc kubenswrapper[4730]: I0930 09:50:13.544490 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:13 crc kubenswrapper[4730]: I0930 09:50:13.544512 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:13 crc kubenswrapper[4730]: I0930 09:50:13.544522 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:13Z","lastTransitionTime":"2025-09-30T09:50:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:13 crc kubenswrapper[4730]: E0930 09:50:13.557274 4730 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:50:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:50:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:13Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:50:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:50:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:13Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"106859cf-ef10-430a-91cd-145c67df2de1\\\",\\\"systemUUID\\\":\\\"07b44d08-082f-49ea-b265-a8fb7a484875\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:13Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:13 crc kubenswrapper[4730]: E0930 09:50:13.557427 4730 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Sep 30 09:50:13 crc kubenswrapper[4730]: I0930 09:50:13.559516 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Sep 30 09:50:13 crc kubenswrapper[4730]: I0930 09:50:13.559565 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:13 crc kubenswrapper[4730]: I0930 09:50:13.559580 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:13 crc kubenswrapper[4730]: I0930 09:50:13.559601 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:13 crc kubenswrapper[4730]: I0930 09:50:13.559640 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:13Z","lastTransitionTime":"2025-09-30T09:50:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:13 crc kubenswrapper[4730]: I0930 09:50:13.662587 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:13 crc kubenswrapper[4730]: I0930 09:50:13.662689 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:13 crc kubenswrapper[4730]: I0930 09:50:13.662702 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:13 crc kubenswrapper[4730]: I0930 09:50:13.662722 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:13 crc kubenswrapper[4730]: I0930 09:50:13.662733 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:13Z","lastTransitionTime":"2025-09-30T09:50:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:13 crc kubenswrapper[4730]: I0930 09:50:13.765204 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:13 crc kubenswrapper[4730]: I0930 09:50:13.765258 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:13 crc kubenswrapper[4730]: I0930 09:50:13.765270 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:13 crc kubenswrapper[4730]: I0930 09:50:13.765289 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:13 crc kubenswrapper[4730]: I0930 09:50:13.765305 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:13Z","lastTransitionTime":"2025-09-30T09:50:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:13 crc kubenswrapper[4730]: I0930 09:50:13.867595 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:13 crc kubenswrapper[4730]: I0930 09:50:13.867834 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:13 crc kubenswrapper[4730]: I0930 09:50:13.867862 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:13 crc kubenswrapper[4730]: I0930 09:50:13.867891 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:13 crc kubenswrapper[4730]: I0930 09:50:13.867913 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:13Z","lastTransitionTime":"2025-09-30T09:50:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:13 crc kubenswrapper[4730]: I0930 09:50:13.971562 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:13 crc kubenswrapper[4730]: I0930 09:50:13.971685 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:13 crc kubenswrapper[4730]: I0930 09:50:13.971711 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:13 crc kubenswrapper[4730]: I0930 09:50:13.971743 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:13 crc kubenswrapper[4730]: I0930 09:50:13.971765 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:13Z","lastTransitionTime":"2025-09-30T09:50:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:14 crc kubenswrapper[4730]: I0930 09:50:14.075488 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:14 crc kubenswrapper[4730]: I0930 09:50:14.075545 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:14 crc kubenswrapper[4730]: I0930 09:50:14.075558 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:14 crc kubenswrapper[4730]: I0930 09:50:14.075579 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:14 crc kubenswrapper[4730]: I0930 09:50:14.075596 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:14Z","lastTransitionTime":"2025-09-30T09:50:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:14 crc kubenswrapper[4730]: I0930 09:50:14.178982 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:14 crc kubenswrapper[4730]: I0930 09:50:14.179025 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:14 crc kubenswrapper[4730]: I0930 09:50:14.179034 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:14 crc kubenswrapper[4730]: I0930 09:50:14.179049 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:14 crc kubenswrapper[4730]: I0930 09:50:14.179061 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:14Z","lastTransitionTime":"2025-09-30T09:50:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:14 crc kubenswrapper[4730]: I0930 09:50:14.281716 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:14 crc kubenswrapper[4730]: I0930 09:50:14.281790 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:14 crc kubenswrapper[4730]: I0930 09:50:14.281804 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:14 crc kubenswrapper[4730]: I0930 09:50:14.281826 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:14 crc kubenswrapper[4730]: I0930 09:50:14.281843 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:14Z","lastTransitionTime":"2025-09-30T09:50:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:14 crc kubenswrapper[4730]: I0930 09:50:14.380525 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 09:50:14 crc kubenswrapper[4730]: I0930 09:50:14.380534 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 09:50:14 crc kubenswrapper[4730]: E0930 09:50:14.380694 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 09:50:14 crc kubenswrapper[4730]: E0930 09:50:14.380811 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 09:50:14 crc kubenswrapper[4730]: I0930 09:50:14.380551 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 09:50:14 crc kubenswrapper[4730]: E0930 09:50:14.380936 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 09:50:14 crc kubenswrapper[4730]: I0930 09:50:14.384914 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:14 crc kubenswrapper[4730]: I0930 09:50:14.384962 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:14 crc kubenswrapper[4730]: I0930 09:50:14.384981 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:14 crc kubenswrapper[4730]: I0930 09:50:14.385011 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:14 crc kubenswrapper[4730]: I0930 09:50:14.385035 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:14Z","lastTransitionTime":"2025-09-30T09:50:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:14 crc kubenswrapper[4730]: I0930 09:50:14.487914 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:14 crc kubenswrapper[4730]: I0930 09:50:14.487963 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:14 crc kubenswrapper[4730]: I0930 09:50:14.487980 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:14 crc kubenswrapper[4730]: I0930 09:50:14.488004 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:14 crc kubenswrapper[4730]: I0930 09:50:14.488021 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:14Z","lastTransitionTime":"2025-09-30T09:50:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:14 crc kubenswrapper[4730]: I0930 09:50:14.590806 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:14 crc kubenswrapper[4730]: I0930 09:50:14.590856 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:14 crc kubenswrapper[4730]: I0930 09:50:14.590866 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:14 crc kubenswrapper[4730]: I0930 09:50:14.590881 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:14 crc kubenswrapper[4730]: I0930 09:50:14.590893 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:14Z","lastTransitionTime":"2025-09-30T09:50:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:14 crc kubenswrapper[4730]: I0930 09:50:14.694180 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:14 crc kubenswrapper[4730]: I0930 09:50:14.694245 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:14 crc kubenswrapper[4730]: I0930 09:50:14.694263 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:14 crc kubenswrapper[4730]: I0930 09:50:14.694332 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:14 crc kubenswrapper[4730]: I0930 09:50:14.694352 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:14Z","lastTransitionTime":"2025-09-30T09:50:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:14 crc kubenswrapper[4730]: I0930 09:50:14.797507 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:14 crc kubenswrapper[4730]: I0930 09:50:14.797908 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:14 crc kubenswrapper[4730]: I0930 09:50:14.798025 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:14 crc kubenswrapper[4730]: I0930 09:50:14.798172 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:14 crc kubenswrapper[4730]: I0930 09:50:14.798325 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:14Z","lastTransitionTime":"2025-09-30T09:50:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:14 crc kubenswrapper[4730]: I0930 09:50:14.901026 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:14 crc kubenswrapper[4730]: I0930 09:50:14.901420 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:14 crc kubenswrapper[4730]: I0930 09:50:14.901488 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:14 crc kubenswrapper[4730]: I0930 09:50:14.901566 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:14 crc kubenswrapper[4730]: I0930 09:50:14.901679 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:14Z","lastTransitionTime":"2025-09-30T09:50:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:15 crc kubenswrapper[4730]: I0930 09:50:15.006139 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:15 crc kubenswrapper[4730]: I0930 09:50:15.006219 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:15 crc kubenswrapper[4730]: I0930 09:50:15.006244 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:15 crc kubenswrapper[4730]: I0930 09:50:15.006276 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:15 crc kubenswrapper[4730]: I0930 09:50:15.006303 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:15Z","lastTransitionTime":"2025-09-30T09:50:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:15 crc kubenswrapper[4730]: I0930 09:50:15.109693 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:15 crc kubenswrapper[4730]: I0930 09:50:15.109749 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:15 crc kubenswrapper[4730]: I0930 09:50:15.109761 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:15 crc kubenswrapper[4730]: I0930 09:50:15.109783 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:15 crc kubenswrapper[4730]: I0930 09:50:15.109797 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:15Z","lastTransitionTime":"2025-09-30T09:50:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:15 crc kubenswrapper[4730]: I0930 09:50:15.212827 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:15 crc kubenswrapper[4730]: I0930 09:50:15.212873 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:15 crc kubenswrapper[4730]: I0930 09:50:15.212885 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:15 crc kubenswrapper[4730]: I0930 09:50:15.212903 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:15 crc kubenswrapper[4730]: I0930 09:50:15.212916 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:15Z","lastTransitionTime":"2025-09-30T09:50:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:15 crc kubenswrapper[4730]: I0930 09:50:15.315890 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:15 crc kubenswrapper[4730]: I0930 09:50:15.316147 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:15 crc kubenswrapper[4730]: I0930 09:50:15.316265 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:15 crc kubenswrapper[4730]: I0930 09:50:15.316390 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:15 crc kubenswrapper[4730]: I0930 09:50:15.316482 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:15Z","lastTransitionTime":"2025-09-30T09:50:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:15 crc kubenswrapper[4730]: I0930 09:50:15.379859 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-dqqrb" Sep 30 09:50:15 crc kubenswrapper[4730]: E0930 09:50:15.380369 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-dqqrb" podUID="be86a67e-c663-4551-9ecf-a8c2a9801cd7" Sep 30 09:50:15 crc kubenswrapper[4730]: I0930 09:50:15.420078 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:15 crc kubenswrapper[4730]: I0930 09:50:15.420149 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:15 crc kubenswrapper[4730]: I0930 09:50:15.420170 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:15 crc kubenswrapper[4730]: I0930 09:50:15.420199 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:15 crc kubenswrapper[4730]: I0930 09:50:15.420222 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:15Z","lastTransitionTime":"2025-09-30T09:50:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:15 crc kubenswrapper[4730]: I0930 09:50:15.523161 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:15 crc kubenswrapper[4730]: I0930 09:50:15.523265 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:15 crc kubenswrapper[4730]: I0930 09:50:15.523281 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:15 crc kubenswrapper[4730]: I0930 09:50:15.523301 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:15 crc kubenswrapper[4730]: I0930 09:50:15.523345 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:15Z","lastTransitionTime":"2025-09-30T09:50:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:15 crc kubenswrapper[4730]: I0930 09:50:15.626851 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:15 crc kubenswrapper[4730]: I0930 09:50:15.626904 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:15 crc kubenswrapper[4730]: I0930 09:50:15.626916 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:15 crc kubenswrapper[4730]: I0930 09:50:15.626934 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:15 crc kubenswrapper[4730]: I0930 09:50:15.626948 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:15Z","lastTransitionTime":"2025-09-30T09:50:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:15 crc kubenswrapper[4730]: I0930 09:50:15.730266 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:15 crc kubenswrapper[4730]: I0930 09:50:15.730343 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:15 crc kubenswrapper[4730]: I0930 09:50:15.730358 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:15 crc kubenswrapper[4730]: I0930 09:50:15.730379 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:15 crc kubenswrapper[4730]: I0930 09:50:15.730393 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:15Z","lastTransitionTime":"2025-09-30T09:50:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:15 crc kubenswrapper[4730]: I0930 09:50:15.833572 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:15 crc kubenswrapper[4730]: I0930 09:50:15.833667 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:15 crc kubenswrapper[4730]: I0930 09:50:15.833685 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:15 crc kubenswrapper[4730]: I0930 09:50:15.833710 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:15 crc kubenswrapper[4730]: I0930 09:50:15.833726 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:15Z","lastTransitionTime":"2025-09-30T09:50:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:15 crc kubenswrapper[4730]: I0930 09:50:15.936471 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:15 crc kubenswrapper[4730]: I0930 09:50:15.936514 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:15 crc kubenswrapper[4730]: I0930 09:50:15.936524 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:15 crc kubenswrapper[4730]: I0930 09:50:15.936539 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:15 crc kubenswrapper[4730]: I0930 09:50:15.936551 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:15Z","lastTransitionTime":"2025-09-30T09:50:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:16 crc kubenswrapper[4730]: I0930 09:50:16.039581 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:16 crc kubenswrapper[4730]: I0930 09:50:16.039661 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:16 crc kubenswrapper[4730]: I0930 09:50:16.039671 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:16 crc kubenswrapper[4730]: I0930 09:50:16.039687 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:16 crc kubenswrapper[4730]: I0930 09:50:16.039699 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:16Z","lastTransitionTime":"2025-09-30T09:50:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:16 crc kubenswrapper[4730]: I0930 09:50:16.142656 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:16 crc kubenswrapper[4730]: I0930 09:50:16.142742 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:16 crc kubenswrapper[4730]: I0930 09:50:16.142756 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:16 crc kubenswrapper[4730]: I0930 09:50:16.142776 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:16 crc kubenswrapper[4730]: I0930 09:50:16.142788 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:16Z","lastTransitionTime":"2025-09-30T09:50:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:16 crc kubenswrapper[4730]: I0930 09:50:16.245572 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:16 crc kubenswrapper[4730]: I0930 09:50:16.245677 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:16 crc kubenswrapper[4730]: I0930 09:50:16.245701 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:16 crc kubenswrapper[4730]: I0930 09:50:16.245727 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:16 crc kubenswrapper[4730]: I0930 09:50:16.245746 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:16Z","lastTransitionTime":"2025-09-30T09:50:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:16 crc kubenswrapper[4730]: I0930 09:50:16.348134 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:16 crc kubenswrapper[4730]: I0930 09:50:16.348183 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:16 crc kubenswrapper[4730]: I0930 09:50:16.348194 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:16 crc kubenswrapper[4730]: I0930 09:50:16.348212 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:16 crc kubenswrapper[4730]: I0930 09:50:16.348222 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:16Z","lastTransitionTime":"2025-09-30T09:50:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:16 crc kubenswrapper[4730]: I0930 09:50:16.380328 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 09:50:16 crc kubenswrapper[4730]: E0930 09:50:16.380443 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 09:50:16 crc kubenswrapper[4730]: I0930 09:50:16.380662 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 09:50:16 crc kubenswrapper[4730]: E0930 09:50:16.380708 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 09:50:16 crc kubenswrapper[4730]: I0930 09:50:16.380888 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 09:50:16 crc kubenswrapper[4730]: E0930 09:50:16.380940 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 09:50:16 crc kubenswrapper[4730]: I0930 09:50:16.392341 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-dqqrb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be86a67e-c663-4551-9ecf-a8c2a9801cd7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9sm8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9sm8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:51Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-dqqrb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:16Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:16 crc kubenswrapper[4730]: I0930 09:50:16.404172 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:16Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:16 crc kubenswrapper[4730]: I0930 09:50:16.414503 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:16Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:16 crc kubenswrapper[4730]: I0930 09:50:16.427038 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-t2frc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"98a6f8df-1ac8-4652-8074-90cb180311ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca62256374b9da6cb7b82db90ee6d121b072440c000bc7dfb04e763224cf666d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\
"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-txcfr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-t2frc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:16Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:16 crc kubenswrapper[4730]: I0930 09:50:16.448427 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"823c4c28-801d-421e-b15f-02a17e300753\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://566f950ac85f82874c332e01386898784e3b3e37ee3c422f8b73f6e732cd4541\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f988662b16857e74e7c88eb9e69279e7b5807aa2831cdd6ed9a7cab0a6b2494\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d99f5927ded358669fa88d47fb6a8d8bad2808bfdd07920056227828e478eda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fbe6a18bf72a902960dfc551de099c3956696ca5ac78ff5afc30a93768be323\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd7abb6daba8cf38e3d8cc62ab526acb2fe714e07477eb5d1ecf77e145cf60dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d525f8baccabf7bc3b8067dbbc59d76851878ccc48a9fbe61c637b6fe57e01d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://920e864b3b47f25ccbf4d23e865008517a888ed0
0df9d5e474a1d43ceac0faf1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://920e864b3b47f25ccbf4d23e865008517a888ed00df9d5e474a1d43ceac0faf1\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T09:50:07Z\\\",\\\"message\\\":\\\"/multus-additional-cni-plugins-p4xvk\\\\nF0930 09:50:07.486769 6349 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:07Z is after 2025-08-24T17:21:41Z]\\\\nI0930 09:50:07.486778 6349 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-console/networking-console-plugin-85b44fc459-gdk6g\\\\nI0930 09:50:07.486723 6349 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-machine-config-operator/machine-config-operator]} name:Service_openshift-machine-config-operator/machine-config-operator_TCP_cluster o\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T09:50:06Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-c5vmh_openshift-ovn-kubernetes(823c4c28-801d-421e-b15f-02a17e300753)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59538b49ec4037e4bd1ccb035a0263f32db2504037aca2ab484dc9323f80b5d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4fcd3f6fd645e486e741ae4ddea7a8669c849d729860c651131da25638a393f\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4fcd3f6fd645e486e741ae4ddea7a8669c849d729860c651131da25638a393f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-c5vmh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:16Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:16 crc kubenswrapper[4730]: I0930 09:50:16.451451 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:16 crc kubenswrapper[4730]: I0930 09:50:16.451528 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:16 crc kubenswrapper[4730]: I0930 09:50:16.451574 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:16 crc kubenswrapper[4730]: I0930 09:50:16.451636 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:16 crc kubenswrapper[4730]: I0930 09:50:16.451657 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:16Z","lastTransitionTime":"2025-09-30T09:50:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:16 crc kubenswrapper[4730]: I0930 09:50:16.462086 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d4482712-8a6c-4e35-8e85-0777588ab827\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d84dba8205ee9f4469b0277711b38ff82a1068b3f1cd951ea4a4eefab51ba88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://58b48296b6764c18fc1b5163c1ef503b124b97f11f287fbbbc5b1144ecd06bbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://461de41b45c19fbaf8e606cd30458759972ca804bac7e4cd5495efd8f30280fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"
cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0abf086d2b6dabfe2b74ed6b31b94bbd61222698aeb420775118dcca92243bbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0abf086d2b6dabfe2b74ed6b31b94bbd61222698aeb420775118dcca92243bbd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:17Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:16Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:16Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:16 crc kubenswrapper[4730]: I0930 09:50:16.475336 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:16Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:16 crc kubenswrapper[4730]: I0930 09:50:16.488287 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://96ab57c0ead54e1fe3bbf9a7fe3fc8375d1e5ebd29840f01bb2022de2f3bfdc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://373aa4d8084c958728b4a3f16056068fb328a59e55a5f50785647d78fac0e424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:16Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:16 crc kubenswrapper[4730]: I0930 09:50:16.502244 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-p4xvk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9871bed2-69f9-44f1-ab80-f8b4b9241e73\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19b882d14f4eaee6c6ad8cdd30c6e1e63eb837c7bc2dd31974c37ffbc3307186\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84aa64a838e41f5db6010538528a9d8d9221b5fe859e1afec9d9d3c71ef90eb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://84aa64a838e41f5db6010538528a9d8d9221b5fe859e1afec9d9d3c71ef90eb7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"start
edAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://618950d5ff8ec1ef34dc0f43e73a265aefbeb97f69a25226fb2007b4d28249a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://618950d5ff8ec1ef34dc0f43e73a265aefbeb97f69a25226fb2007b4d28249a7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://849eb24b3a7e62a7667c303069953f1391cdebf8f2888dc7d3967667c31c2a3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://849eb24b3a7e62a7667c303069953f1391cdebf8f2888dc7d3967667c31c2a3e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://685162f0c76d93c0aa78dc2c5c0ee794627f665afa60af9d9d0ae3ba8f5f7b59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\"
:\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://685162f0c76d93c0aa78dc2c5c0ee794627f665afa60af9d9d0ae3ba8f5f7b59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c92fbb212b1ad8be488e639f7f88263b3d96ab0320da532d908b3c9df59ef2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c92fbb212b1ad8be488e639f7f88263b3d96ab0320da532d908b3c9df59ef2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://840f2a769ccdfba8933c685d07c325cf84d38bccf1d317c3b664f298aa52d878\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://840f2a769ccdfba8933c685d07c325cf84d38bccf1d317c3b664f298aa52d878\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.1
68.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-p4xvk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:16Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:16 crc kubenswrapper[4730]: I0930 09:50:16.512750 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-nw55k" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c0a99445-b60e-4c47-b4ae-00b9983d8a15\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b566e571569572866ddcfc76684ec3f1fdabc0f890aa5fd8588d136b890e88ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qdpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:40Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nw55k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:16Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:16 crc kubenswrapper[4730]: I0930 09:50:16.523196 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wh45q" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c534fd9f-3767-4be6-a84e-45260fe2042f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c1daa82f1b7e2909d26e662848abe7adaa29361f528ea2efac340bfd685bb155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n2mj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6b629ad4121cddae85f73b40e36013ebfd8d41e9f990dba444bdd2e51d8f9d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n2mj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:50Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wh45q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:16Z is after 2025-08-24T17:21:41Z" Sep 30 
09:50:16 crc kubenswrapper[4730]: I0930 09:50:16.537429 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"14260296-7de2-4c79-8afc-9472b7132a27\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac2d1a0a531df208cd612ed7da358209e05fb5a21f658ca2d1ab62720db094e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9d7ccc34d02e190d685a52bfa797834b585dfe919856b9af92419fa4e6e3c8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e42db724f492a056ad52bf50f43c9603664c4307a8f0c01d2d83d36f644c134c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\
\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5305f42ae8fc9005ec4726813a881f7b86ab167dd060238bbde4c9af5eb2c010\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://009056b5d735b1cd232be92cb1bfae18ea1e4e6f5b06185d424bba38ef57ade7\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T09:49:30Z\\\",\\\"message\\\":\\\"W0930 09:49:19.606005 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0930 09:49:19.606415 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759225759 cert, and key in /tmp/serving-cert-1864340106/serving-signer.crt, /tmp/serving-cert-1864340106/serving-signer.key\\\\nI0930 09:49:19.940795 1 observer_polling.go:159] Starting file observer\\\\nW0930 09:49:19.943973 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0930 09:49:19.944252 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 09:49:19.946386 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1864340106/tls.crt::/tmp/serving-cert-1864340106/tls.key\\\\\\\"\\\\nF0930 09:49:30.331930 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2da24b18038fc740ead0fe99d18f91c87014548941763cf0ef15284ed5eb228\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://674fc8de60d8bca93dc0d54d5e12664780d58b8b4c5eaf314e923c2f0644e11b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://674fc8de60d8bca93dc0d54d5e12664780d58b8b4c5eaf314e923c2f0644e11b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:16Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:16Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:16 crc kubenswrapper[4730]: I0930 09:50:16.550879 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"17c9fe87-51d8-4e94-b0aa-10f3112788c6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a687cd54db44118582a4e1b57fc64241860ae1cfdc202ffd5dccc517967e21f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://95fa75e3d43d0b4c4f3ddcb59fc3bd9fb3a75647e5fedd92463bdfaf6c3db722\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://758b57b21f203bcb20f4e3a3b20c6de8696ba56b4e0eff6a31faaf6879a3e7b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba42c83f0075e3d0f10f24307dd574c42a505351503a8794bbcee9e986ec80e3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:16Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:16Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:16 crc kubenswrapper[4730]: I0930 09:50:16.555814 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:16 crc kubenswrapper[4730]: I0930 09:50:16.555879 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:16 crc kubenswrapper[4730]: I0930 09:50:16.555892 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:16 crc kubenswrapper[4730]: I0930 09:50:16.555915 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:16 crc kubenswrapper[4730]: I0930 09:50:16.555931 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:16Z","lastTransitionTime":"2025-09-30T09:50:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:16 crc kubenswrapper[4730]: I0930 09:50:16.563811 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1917c40c0ff4b4b6a2389c465a4887bad0db0b5b86a8715e0c790b3e0acfb973\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:16Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:16 crc kubenswrapper[4730]: I0930 09:50:16.579050 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e32477c6ee071d9e994825da575711cc75f177c998dd5fd170be07551b40e049\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:16Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:16 crc kubenswrapper[4730]: I0930 09:50:16.590157 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"95bd4436-8399-478d-9552-c9ba5ae8f327\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b7761b460113a85528a11a7475d5245ce3009a0dd413a8deec822c6c0eac07b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9d975\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://defac85c6bf34a3ea262b1ad293516b72b27c3f3e328f3f970694bb978bf90a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9d975\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d4zf9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:16Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:16 crc kubenswrapper[4730]: I0930 09:50:16.601005 4730 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-dns/node-resolver-s64nf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a7e6b85-ac68-4da9-b7eb-b5a936f639df\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a77da47c8d44b3aae7f4e27fcc4df190549c2d881e0969dbe3b8951103477ba3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6rknv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s64nf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:16Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:16 crc kubenswrapper[4730]: I0930 09:50:16.658527 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:16 crc kubenswrapper[4730]: I0930 09:50:16.658852 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:16 crc kubenswrapper[4730]: I0930 09:50:16.658968 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:16 crc kubenswrapper[4730]: I0930 09:50:16.659087 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:16 crc kubenswrapper[4730]: I0930 09:50:16.659195 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:16Z","lastTransitionTime":"2025-09-30T09:50:16Z","reason":"KubeletNotReady","message":"container runtime network not 
ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:16 crc kubenswrapper[4730]: I0930 09:50:16.762090 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:16 crc kubenswrapper[4730]: I0930 09:50:16.762441 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:16 crc kubenswrapper[4730]: I0930 09:50:16.762555 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:16 crc kubenswrapper[4730]: I0930 09:50:16.762685 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:16 crc kubenswrapper[4730]: I0930 09:50:16.762804 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:16Z","lastTransitionTime":"2025-09-30T09:50:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:16 crc kubenswrapper[4730]: I0930 09:50:16.865395 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:16 crc kubenswrapper[4730]: I0930 09:50:16.865920 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:16 crc kubenswrapper[4730]: I0930 09:50:16.866114 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:16 crc kubenswrapper[4730]: I0930 09:50:16.866303 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:16 crc kubenswrapper[4730]: I0930 09:50:16.866479 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:16Z","lastTransitionTime":"2025-09-30T09:50:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:16 crc kubenswrapper[4730]: I0930 09:50:16.972102 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:16 crc kubenswrapper[4730]: I0930 09:50:16.972243 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:16 crc kubenswrapper[4730]: I0930 09:50:16.972295 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:16 crc kubenswrapper[4730]: I0930 09:50:16.972440 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:16 crc kubenswrapper[4730]: I0930 09:50:16.972465 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:16Z","lastTransitionTime":"2025-09-30T09:50:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:17 crc kubenswrapper[4730]: I0930 09:50:17.076140 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:17 crc kubenswrapper[4730]: I0930 09:50:17.076188 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:17 crc kubenswrapper[4730]: I0930 09:50:17.076201 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:17 crc kubenswrapper[4730]: I0930 09:50:17.076221 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:17 crc kubenswrapper[4730]: I0930 09:50:17.076231 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:17Z","lastTransitionTime":"2025-09-30T09:50:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:17 crc kubenswrapper[4730]: I0930 09:50:17.180234 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:17 crc kubenswrapper[4730]: I0930 09:50:17.180286 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:17 crc kubenswrapper[4730]: I0930 09:50:17.180297 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:17 crc kubenswrapper[4730]: I0930 09:50:17.180315 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:17 crc kubenswrapper[4730]: I0930 09:50:17.180327 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:17Z","lastTransitionTime":"2025-09-30T09:50:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:17 crc kubenswrapper[4730]: I0930 09:50:17.283353 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:17 crc kubenswrapper[4730]: I0930 09:50:17.283405 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:17 crc kubenswrapper[4730]: I0930 09:50:17.283420 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:17 crc kubenswrapper[4730]: I0930 09:50:17.283445 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:17 crc kubenswrapper[4730]: I0930 09:50:17.283461 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:17Z","lastTransitionTime":"2025-09-30T09:50:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:17 crc kubenswrapper[4730]: I0930 09:50:17.380658 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-dqqrb" Sep 30 09:50:17 crc kubenswrapper[4730]: E0930 09:50:17.380949 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-dqqrb" podUID="be86a67e-c663-4551-9ecf-a8c2a9801cd7" Sep 30 09:50:17 crc kubenswrapper[4730]: I0930 09:50:17.387506 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:17 crc kubenswrapper[4730]: I0930 09:50:17.387592 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:17 crc kubenswrapper[4730]: I0930 09:50:17.387658 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:17 crc kubenswrapper[4730]: I0930 09:50:17.387689 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:17 crc kubenswrapper[4730]: I0930 09:50:17.387707 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:17Z","lastTransitionTime":"2025-09-30T09:50:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:17 crc kubenswrapper[4730]: I0930 09:50:17.490814 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:17 crc kubenswrapper[4730]: I0930 09:50:17.490859 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:17 crc kubenswrapper[4730]: I0930 09:50:17.490870 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:17 crc kubenswrapper[4730]: I0930 09:50:17.490890 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:17 crc kubenswrapper[4730]: I0930 09:50:17.490902 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:17Z","lastTransitionTime":"2025-09-30T09:50:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:17 crc kubenswrapper[4730]: I0930 09:50:17.593233 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:17 crc kubenswrapper[4730]: I0930 09:50:17.593299 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:17 crc kubenswrapper[4730]: I0930 09:50:17.593308 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:17 crc kubenswrapper[4730]: I0930 09:50:17.593346 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:17 crc kubenswrapper[4730]: I0930 09:50:17.593358 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:17Z","lastTransitionTime":"2025-09-30T09:50:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:17 crc kubenswrapper[4730]: I0930 09:50:17.696600 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:17 crc kubenswrapper[4730]: I0930 09:50:17.696662 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:17 crc kubenswrapper[4730]: I0930 09:50:17.696674 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:17 crc kubenswrapper[4730]: I0930 09:50:17.696692 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:17 crc kubenswrapper[4730]: I0930 09:50:17.696704 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:17Z","lastTransitionTime":"2025-09-30T09:50:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:17 crc kubenswrapper[4730]: I0930 09:50:17.799236 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:17 crc kubenswrapper[4730]: I0930 09:50:17.799275 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:17 crc kubenswrapper[4730]: I0930 09:50:17.799285 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:17 crc kubenswrapper[4730]: I0930 09:50:17.799301 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:17 crc kubenswrapper[4730]: I0930 09:50:17.799311 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:17Z","lastTransitionTime":"2025-09-30T09:50:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:17 crc kubenswrapper[4730]: I0930 09:50:17.902142 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:17 crc kubenswrapper[4730]: I0930 09:50:17.902212 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:17 crc kubenswrapper[4730]: I0930 09:50:17.902230 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:17 crc kubenswrapper[4730]: I0930 09:50:17.902254 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:17 crc kubenswrapper[4730]: I0930 09:50:17.902268 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:17Z","lastTransitionTime":"2025-09-30T09:50:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:18 crc kubenswrapper[4730]: I0930 09:50:18.004809 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:18 crc kubenswrapper[4730]: I0930 09:50:18.004864 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:18 crc kubenswrapper[4730]: I0930 09:50:18.004879 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:18 crc kubenswrapper[4730]: I0930 09:50:18.004899 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:18 crc kubenswrapper[4730]: I0930 09:50:18.004910 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:18Z","lastTransitionTime":"2025-09-30T09:50:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:18 crc kubenswrapper[4730]: I0930 09:50:18.108505 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:18 crc kubenswrapper[4730]: I0930 09:50:18.108547 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:18 crc kubenswrapper[4730]: I0930 09:50:18.108557 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:18 crc kubenswrapper[4730]: I0930 09:50:18.108574 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:18 crc kubenswrapper[4730]: I0930 09:50:18.108589 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:18Z","lastTransitionTime":"2025-09-30T09:50:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:18 crc kubenswrapper[4730]: I0930 09:50:18.211544 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:18 crc kubenswrapper[4730]: I0930 09:50:18.211589 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:18 crc kubenswrapper[4730]: I0930 09:50:18.211602 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:18 crc kubenswrapper[4730]: I0930 09:50:18.211635 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:18 crc kubenswrapper[4730]: I0930 09:50:18.211650 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:18Z","lastTransitionTime":"2025-09-30T09:50:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:18 crc kubenswrapper[4730]: I0930 09:50:18.314449 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:18 crc kubenswrapper[4730]: I0930 09:50:18.314494 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:18 crc kubenswrapper[4730]: I0930 09:50:18.314504 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:18 crc kubenswrapper[4730]: I0930 09:50:18.314522 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:18 crc kubenswrapper[4730]: I0930 09:50:18.314532 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:18Z","lastTransitionTime":"2025-09-30T09:50:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:18 crc kubenswrapper[4730]: I0930 09:50:18.380184 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 09:50:18 crc kubenswrapper[4730]: I0930 09:50:18.380228 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 09:50:18 crc kubenswrapper[4730]: I0930 09:50:18.380300 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 09:50:18 crc kubenswrapper[4730]: E0930 09:50:18.380331 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 09:50:18 crc kubenswrapper[4730]: E0930 09:50:18.380443 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 09:50:18 crc kubenswrapper[4730]: E0930 09:50:18.380557 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 09:50:18 crc kubenswrapper[4730]: I0930 09:50:18.417486 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:18 crc kubenswrapper[4730]: I0930 09:50:18.417806 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:18 crc kubenswrapper[4730]: I0930 09:50:18.417883 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:18 crc kubenswrapper[4730]: I0930 09:50:18.417959 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:18 crc kubenswrapper[4730]: I0930 09:50:18.418024 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:18Z","lastTransitionTime":"2025-09-30T09:50:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:18 crc kubenswrapper[4730]: I0930 09:50:18.520784 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:18 crc kubenswrapper[4730]: I0930 09:50:18.521149 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:18 crc kubenswrapper[4730]: I0930 09:50:18.521299 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:18 crc kubenswrapper[4730]: I0930 09:50:18.521525 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:18 crc kubenswrapper[4730]: I0930 09:50:18.521714 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:18Z","lastTransitionTime":"2025-09-30T09:50:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:18 crc kubenswrapper[4730]: I0930 09:50:18.624082 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:18 crc kubenswrapper[4730]: I0930 09:50:18.624848 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:18 crc kubenswrapper[4730]: I0930 09:50:18.624940 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:18 crc kubenswrapper[4730]: I0930 09:50:18.625029 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:18 crc kubenswrapper[4730]: I0930 09:50:18.625110 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:18Z","lastTransitionTime":"2025-09-30T09:50:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:18 crc kubenswrapper[4730]: I0930 09:50:18.729160 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:18 crc kubenswrapper[4730]: I0930 09:50:18.729563 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:18 crc kubenswrapper[4730]: I0930 09:50:18.729689 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:18 crc kubenswrapper[4730]: I0930 09:50:18.729795 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:18 crc kubenswrapper[4730]: I0930 09:50:18.729875 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:18Z","lastTransitionTime":"2025-09-30T09:50:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:18 crc kubenswrapper[4730]: I0930 09:50:18.832630 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:18 crc kubenswrapper[4730]: I0930 09:50:18.832668 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:18 crc kubenswrapper[4730]: I0930 09:50:18.832677 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:18 crc kubenswrapper[4730]: I0930 09:50:18.832691 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:18 crc kubenswrapper[4730]: I0930 09:50:18.832702 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:18Z","lastTransitionTime":"2025-09-30T09:50:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:18 crc kubenswrapper[4730]: I0930 09:50:18.934742 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:18 crc kubenswrapper[4730]: I0930 09:50:18.934831 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:18 crc kubenswrapper[4730]: I0930 09:50:18.934844 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:18 crc kubenswrapper[4730]: I0930 09:50:18.934878 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:18 crc kubenswrapper[4730]: I0930 09:50:18.934888 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:18Z","lastTransitionTime":"2025-09-30T09:50:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:19 crc kubenswrapper[4730]: I0930 09:50:19.037675 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:19 crc kubenswrapper[4730]: I0930 09:50:19.037722 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:19 crc kubenswrapper[4730]: I0930 09:50:19.037732 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:19 crc kubenswrapper[4730]: I0930 09:50:19.037747 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:19 crc kubenswrapper[4730]: I0930 09:50:19.037756 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:19Z","lastTransitionTime":"2025-09-30T09:50:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:19 crc kubenswrapper[4730]: I0930 09:50:19.140930 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:19 crc kubenswrapper[4730]: I0930 09:50:19.140985 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:19 crc kubenswrapper[4730]: I0930 09:50:19.140997 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:19 crc kubenswrapper[4730]: I0930 09:50:19.141017 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:19 crc kubenswrapper[4730]: I0930 09:50:19.141033 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:19Z","lastTransitionTime":"2025-09-30T09:50:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:19 crc kubenswrapper[4730]: I0930 09:50:19.243568 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:19 crc kubenswrapper[4730]: I0930 09:50:19.243637 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:19 crc kubenswrapper[4730]: I0930 09:50:19.243653 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:19 crc kubenswrapper[4730]: I0930 09:50:19.243672 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:19 crc kubenswrapper[4730]: I0930 09:50:19.243685 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:19Z","lastTransitionTime":"2025-09-30T09:50:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:19 crc kubenswrapper[4730]: I0930 09:50:19.347180 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:19 crc kubenswrapper[4730]: I0930 09:50:19.347224 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:19 crc kubenswrapper[4730]: I0930 09:50:19.347237 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:19 crc kubenswrapper[4730]: I0930 09:50:19.347257 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:19 crc kubenswrapper[4730]: I0930 09:50:19.347271 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:19Z","lastTransitionTime":"2025-09-30T09:50:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:19 crc kubenswrapper[4730]: I0930 09:50:19.380918 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-dqqrb" Sep 30 09:50:19 crc kubenswrapper[4730]: E0930 09:50:19.381134 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-dqqrb" podUID="be86a67e-c663-4551-9ecf-a8c2a9801cd7" Sep 30 09:50:19 crc kubenswrapper[4730]: I0930 09:50:19.449661 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:19 crc kubenswrapper[4730]: I0930 09:50:19.449706 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:19 crc kubenswrapper[4730]: I0930 09:50:19.449721 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:19 crc kubenswrapper[4730]: I0930 09:50:19.449745 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:19 crc kubenswrapper[4730]: I0930 09:50:19.449760 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:19Z","lastTransitionTime":"2025-09-30T09:50:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:19 crc kubenswrapper[4730]: I0930 09:50:19.558396 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:19 crc kubenswrapper[4730]: I0930 09:50:19.558578 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:19 crc kubenswrapper[4730]: I0930 09:50:19.558590 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:19 crc kubenswrapper[4730]: I0930 09:50:19.558679 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:19 crc kubenswrapper[4730]: I0930 09:50:19.558692 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:19Z","lastTransitionTime":"2025-09-30T09:50:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:19 crc kubenswrapper[4730]: I0930 09:50:19.661494 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:19 crc kubenswrapper[4730]: I0930 09:50:19.661550 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:19 crc kubenswrapper[4730]: I0930 09:50:19.661560 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:19 crc kubenswrapper[4730]: I0930 09:50:19.661578 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:19 crc kubenswrapper[4730]: I0930 09:50:19.661589 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:19Z","lastTransitionTime":"2025-09-30T09:50:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:19 crc kubenswrapper[4730]: I0930 09:50:19.764764 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:19 crc kubenswrapper[4730]: I0930 09:50:19.764838 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:19 crc kubenswrapper[4730]: I0930 09:50:19.764852 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:19 crc kubenswrapper[4730]: I0930 09:50:19.764876 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:19 crc kubenswrapper[4730]: I0930 09:50:19.764891 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:19Z","lastTransitionTime":"2025-09-30T09:50:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:19 crc kubenswrapper[4730]: I0930 09:50:19.866896 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:19 crc kubenswrapper[4730]: I0930 09:50:19.866958 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:19 crc kubenswrapper[4730]: I0930 09:50:19.866976 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:19 crc kubenswrapper[4730]: I0930 09:50:19.866999 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:19 crc kubenswrapper[4730]: I0930 09:50:19.867017 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:19Z","lastTransitionTime":"2025-09-30T09:50:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:19 crc kubenswrapper[4730]: I0930 09:50:19.969478 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:19 crc kubenswrapper[4730]: I0930 09:50:19.969528 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:19 crc kubenswrapper[4730]: I0930 09:50:19.969542 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:19 crc kubenswrapper[4730]: I0930 09:50:19.969561 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:19 crc kubenswrapper[4730]: I0930 09:50:19.969576 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:19Z","lastTransitionTime":"2025-09-30T09:50:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:20 crc kubenswrapper[4730]: I0930 09:50:20.072869 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:20 crc kubenswrapper[4730]: I0930 09:50:20.072948 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:20 crc kubenswrapper[4730]: I0930 09:50:20.072968 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:20 crc kubenswrapper[4730]: I0930 09:50:20.073005 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:20 crc kubenswrapper[4730]: I0930 09:50:20.073025 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:20Z","lastTransitionTime":"2025-09-30T09:50:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:20 crc kubenswrapper[4730]: I0930 09:50:20.176250 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:20 crc kubenswrapper[4730]: I0930 09:50:20.176287 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:20 crc kubenswrapper[4730]: I0930 09:50:20.176297 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:20 crc kubenswrapper[4730]: I0930 09:50:20.176313 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:20 crc kubenswrapper[4730]: I0930 09:50:20.176323 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:20Z","lastTransitionTime":"2025-09-30T09:50:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Sep 30 09:50:20 crc kubenswrapper[4730]: I0930 09:50:20.379845 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 30 09:50:20 crc kubenswrapper[4730]: I0930 09:50:20.379976 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 30 09:50:20 crc kubenswrapper[4730]: I0930 09:50:20.380066 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 30 09:50:20 crc kubenswrapper[4730]: E0930 09:50:20.380138 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 30 09:50:20 crc kubenswrapper[4730]: E0930 09:50:20.380269 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 30 09:50:20 crc kubenswrapper[4730]: E0930 09:50:20.379998 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 30 09:50:21 crc kubenswrapper[4730]: I0930 09:50:21.380281 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-dqqrb"
Sep 30 09:50:21 crc kubenswrapper[4730]: E0930 09:50:21.380713 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-dqqrb" podUID="be86a67e-c663-4551-9ecf-a8c2a9801cd7"
Sep 30 09:50:21 crc kubenswrapper[4730]: I0930 09:50:21.381006 4730 scope.go:117] "RemoveContainer" containerID="920e864b3b47f25ccbf4d23e865008517a888ed00df9d5e474a1d43ceac0faf1"
Sep 30 09:50:21 crc kubenswrapper[4730]: E0930 09:50:21.381150 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-c5vmh_openshift-ovn-kubernetes(823c4c28-801d-421e-b15f-02a17e300753)\"" pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" podUID="823c4c28-801d-421e-b15f-02a17e300753"
Sep 30 09:50:22 crc kubenswrapper[4730]: I0930 09:50:22.380184 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 30 09:50:22 crc kubenswrapper[4730]: I0930 09:50:22.380235 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 30 09:50:22 crc kubenswrapper[4730]: I0930 09:50:22.380324 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 30 09:50:22 crc kubenswrapper[4730]: E0930 09:50:22.380900 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 30 09:50:22 crc kubenswrapper[4730]: E0930 09:50:22.381032 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 30 09:50:22 crc kubenswrapper[4730]: E0930 09:50:22.381158 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 30 09:50:23 crc kubenswrapper[4730]: I0930 09:50:23.380587 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-dqqrb"
Sep 30 09:50:23 crc kubenswrapper[4730]: E0930 09:50:23.380744 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-dqqrb" podUID="be86a67e-c663-4551-9ecf-a8c2a9801cd7"
Sep 30 09:50:23 crc kubenswrapper[4730]: I0930 09:50:23.451121 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/be86a67e-c663-4551-9ecf-a8c2a9801cd7-metrics-certs\") pod \"network-metrics-daemon-dqqrb\" (UID: \"be86a67e-c663-4551-9ecf-a8c2a9801cd7\") " pod="openshift-multus/network-metrics-daemon-dqqrb"
Sep 30 09:50:23 crc kubenswrapper[4730]: E0930 09:50:23.451308 4730 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Sep 30 09:50:23 crc kubenswrapper[4730]: E0930 09:50:23.451405 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/be86a67e-c663-4551-9ecf-a8c2a9801cd7-metrics-certs podName:be86a67e-c663-4551-9ecf-a8c2a9801cd7 nodeName:}" failed. No retries permitted until 2025-09-30 09:50:55.451381348 +0000 UTC m=+99.784641341 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/be86a67e-c663-4551-9ecf-a8c2a9801cd7-metrics-certs") pod "network-metrics-daemon-dqqrb" (UID: "be86a67e-c663-4551-9ecf-a8c2a9801cd7") : object "openshift-multus"/"metrics-daemon-secret" not registered
Sep 30 09:50:23 crc kubenswrapper[4730]: I0930 09:50:23.798189 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 09:50:23 crc kubenswrapper[4730]: I0930 09:50:23.798335 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 09:50:23 crc kubenswrapper[4730]: I0930 09:50:23.798351 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 09:50:23 crc kubenswrapper[4730]: I0930 09:50:23.798400 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 09:50:23 crc kubenswrapper[4730]: I0930 09:50:23.798416 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:23Z","lastTransitionTime":"2025-09-30T09:50:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 09:50:23 crc kubenswrapper[4730]: E0930 09:50:23.812117 4730 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:50:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:50:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:23Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:50:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:50:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:23Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"106859cf-ef10-430a-91cd-145c67df2de1\\\",\\\"systemUUID\\\":\\\"07b44d08-082f-49ea-b265-a8fb7a484875\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:23Z is after 
2025-08-24T17:21:41Z" Sep 30 09:50:23 crc kubenswrapper[4730]: I0930 09:50:23.816166 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:23 crc kubenswrapper[4730]: I0930 09:50:23.816200 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:23 crc kubenswrapper[4730]: I0930 09:50:23.816217 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:23 crc kubenswrapper[4730]: I0930 09:50:23.816234 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:23 crc kubenswrapper[4730]: I0930 09:50:23.816245 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:23Z","lastTransitionTime":"2025-09-30T09:50:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:23 crc kubenswrapper[4730]: E0930 09:50:23.834084 4730 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:50:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:50:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:23Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:50:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:50:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:23Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
2025-08-24T17:21:41Z" Sep 30 09:50:23 crc kubenswrapper[4730]: I0930 09:50:23.838384 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:23 crc kubenswrapper[4730]: I0930 09:50:23.838414 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:23 crc kubenswrapper[4730]: I0930 09:50:23.838425 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:23 crc kubenswrapper[4730]: I0930 09:50:23.838443 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:23 crc kubenswrapper[4730]: I0930 09:50:23.838455 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:23Z","lastTransitionTime":"2025-09-30T09:50:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:23 crc kubenswrapper[4730]: E0930 09:50:23.851891 4730 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:50:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:50:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:23Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:50:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:50:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:23Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
2025-08-24T17:21:41Z" Sep 30 09:50:23 crc kubenswrapper[4730]: I0930 09:50:23.855465 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:23 crc kubenswrapper[4730]: I0930 09:50:23.855501 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:23 crc kubenswrapper[4730]: I0930 09:50:23.855512 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:23 crc kubenswrapper[4730]: I0930 09:50:23.855528 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:23 crc kubenswrapper[4730]: I0930 09:50:23.855539 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:23Z","lastTransitionTime":"2025-09-30T09:50:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:23 crc kubenswrapper[4730]: E0930 09:50:23.866505 4730 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:50:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:50:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:23Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:50:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:50:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:23Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
2025-08-24T17:21:41Z" Sep 30 09:50:23 crc kubenswrapper[4730]: I0930 09:50:23.870240 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:23 crc kubenswrapper[4730]: I0930 09:50:23.870290 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:23 crc kubenswrapper[4730]: I0930 09:50:23.870310 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:23 crc kubenswrapper[4730]: I0930 09:50:23.870337 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:23 crc kubenswrapper[4730]: I0930 09:50:23.870351 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:23Z","lastTransitionTime":"2025-09-30T09:50:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:23 crc kubenswrapper[4730]: E0930 09:50:23.882156 4730 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:50:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:50:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:23Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:50:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:50:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:23Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
2025-08-24T17:21:41Z" Sep 30 09:50:23 crc kubenswrapper[4730]: E0930 09:50:23.882276 4730 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Sep 30 09:50:23 crc kubenswrapper[4730]: I0930 09:50:23.883943 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:23 crc kubenswrapper[4730]: I0930 09:50:23.883975 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:23 crc kubenswrapper[4730]: I0930 09:50:23.883987 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:23 crc kubenswrapper[4730]: I0930 09:50:23.884003 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:23 crc kubenswrapper[4730]: I0930 09:50:23.884014 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:23Z","lastTransitionTime":"2025-09-30T09:50:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:23 crc kubenswrapper[4730]: I0930 09:50:23.986590 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:23 crc kubenswrapper[4730]: I0930 09:50:23.986688 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:23 crc kubenswrapper[4730]: I0930 09:50:23.986702 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:23 crc kubenswrapper[4730]: I0930 09:50:23.986720 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:23 crc kubenswrapper[4730]: I0930 09:50:23.986734 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:23Z","lastTransitionTime":"2025-09-30T09:50:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:24 crc kubenswrapper[4730]: I0930 09:50:24.089125 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:24 crc kubenswrapper[4730]: I0930 09:50:24.089168 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:24 crc kubenswrapper[4730]: I0930 09:50:24.089179 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:24 crc kubenswrapper[4730]: I0930 09:50:24.089194 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:24 crc kubenswrapper[4730]: I0930 09:50:24.089204 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:24Z","lastTransitionTime":"2025-09-30T09:50:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:24 crc kubenswrapper[4730]: I0930 09:50:24.191817 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:24 crc kubenswrapper[4730]: I0930 09:50:24.191852 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:24 crc kubenswrapper[4730]: I0930 09:50:24.191863 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:24 crc kubenswrapper[4730]: I0930 09:50:24.191880 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:24 crc kubenswrapper[4730]: I0930 09:50:24.191891 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:24Z","lastTransitionTime":"2025-09-30T09:50:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:24 crc kubenswrapper[4730]: I0930 09:50:24.294382 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:24 crc kubenswrapper[4730]: I0930 09:50:24.294406 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:24 crc kubenswrapper[4730]: I0930 09:50:24.294414 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:24 crc kubenswrapper[4730]: I0930 09:50:24.294429 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:24 crc kubenswrapper[4730]: I0930 09:50:24.294438 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:24Z","lastTransitionTime":"2025-09-30T09:50:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:24 crc kubenswrapper[4730]: I0930 09:50:24.381931 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 09:50:24 crc kubenswrapper[4730]: I0930 09:50:24.381960 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 09:50:24 crc kubenswrapper[4730]: I0930 09:50:24.382073 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 09:50:24 crc kubenswrapper[4730]: E0930 09:50:24.382280 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 09:50:24 crc kubenswrapper[4730]: E0930 09:50:24.382346 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 09:50:24 crc kubenswrapper[4730]: E0930 09:50:24.382429 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 09:50:24 crc kubenswrapper[4730]: I0930 09:50:24.396896 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:24 crc kubenswrapper[4730]: I0930 09:50:24.396930 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:24 crc kubenswrapper[4730]: I0930 09:50:24.396943 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:24 crc kubenswrapper[4730]: I0930 09:50:24.396960 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:24 crc kubenswrapper[4730]: I0930 09:50:24.396973 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:24Z","lastTransitionTime":"2025-09-30T09:50:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:24 crc kubenswrapper[4730]: I0930 09:50:24.499538 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:24 crc kubenswrapper[4730]: I0930 09:50:24.499587 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:24 crc kubenswrapper[4730]: I0930 09:50:24.499599 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:24 crc kubenswrapper[4730]: I0930 09:50:24.499635 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:24 crc kubenswrapper[4730]: I0930 09:50:24.499651 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:24Z","lastTransitionTime":"2025-09-30T09:50:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:24 crc kubenswrapper[4730]: I0930 09:50:24.602188 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:24 crc kubenswrapper[4730]: I0930 09:50:24.602241 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:24 crc kubenswrapper[4730]: I0930 09:50:24.602254 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:24 crc kubenswrapper[4730]: I0930 09:50:24.602274 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:24 crc kubenswrapper[4730]: I0930 09:50:24.602285 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:24Z","lastTransitionTime":"2025-09-30T09:50:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:24 crc kubenswrapper[4730]: I0930 09:50:24.704317 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:24 crc kubenswrapper[4730]: I0930 09:50:24.704366 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:24 crc kubenswrapper[4730]: I0930 09:50:24.704376 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:24 crc kubenswrapper[4730]: I0930 09:50:24.704392 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:24 crc kubenswrapper[4730]: I0930 09:50:24.704404 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:24Z","lastTransitionTime":"2025-09-30T09:50:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:24 crc kubenswrapper[4730]: I0930 09:50:24.807563 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:24 crc kubenswrapper[4730]: I0930 09:50:24.807603 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:24 crc kubenswrapper[4730]: I0930 09:50:24.807634 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:24 crc kubenswrapper[4730]: I0930 09:50:24.807654 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:24 crc kubenswrapper[4730]: I0930 09:50:24.807668 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:24Z","lastTransitionTime":"2025-09-30T09:50:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:24 crc kubenswrapper[4730]: I0930 09:50:24.910574 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:24 crc kubenswrapper[4730]: I0930 09:50:24.910683 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:24 crc kubenswrapper[4730]: I0930 09:50:24.910697 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:24 crc kubenswrapper[4730]: I0930 09:50:24.910715 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:24 crc kubenswrapper[4730]: I0930 09:50:24.910729 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:24Z","lastTransitionTime":"2025-09-30T09:50:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:25 crc kubenswrapper[4730]: I0930 09:50:25.013110 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:25 crc kubenswrapper[4730]: I0930 09:50:25.013414 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:25 crc kubenswrapper[4730]: I0930 09:50:25.013485 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:25 crc kubenswrapper[4730]: I0930 09:50:25.013554 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:25 crc kubenswrapper[4730]: I0930 09:50:25.013646 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:25Z","lastTransitionTime":"2025-09-30T09:50:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:25 crc kubenswrapper[4730]: I0930 09:50:25.117126 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:25 crc kubenswrapper[4730]: I0930 09:50:25.117649 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:25 crc kubenswrapper[4730]: I0930 09:50:25.117831 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:25 crc kubenswrapper[4730]: I0930 09:50:25.117987 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:25 crc kubenswrapper[4730]: I0930 09:50:25.118170 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:25Z","lastTransitionTime":"2025-09-30T09:50:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:25 crc kubenswrapper[4730]: I0930 09:50:25.230676 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:25 crc kubenswrapper[4730]: I0930 09:50:25.230728 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:25 crc kubenswrapper[4730]: I0930 09:50:25.230738 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:25 crc kubenswrapper[4730]: I0930 09:50:25.230757 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:25 crc kubenswrapper[4730]: I0930 09:50:25.230771 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:25Z","lastTransitionTime":"2025-09-30T09:50:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:25 crc kubenswrapper[4730]: I0930 09:50:25.333558 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:25 crc kubenswrapper[4730]: I0930 09:50:25.333604 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:25 crc kubenswrapper[4730]: I0930 09:50:25.333634 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:25 crc kubenswrapper[4730]: I0930 09:50:25.333649 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:25 crc kubenswrapper[4730]: I0930 09:50:25.333659 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:25Z","lastTransitionTime":"2025-09-30T09:50:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:25 crc kubenswrapper[4730]: I0930 09:50:25.380315 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-dqqrb" Sep 30 09:50:25 crc kubenswrapper[4730]: E0930 09:50:25.380741 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-dqqrb" podUID="be86a67e-c663-4551-9ecf-a8c2a9801cd7" Sep 30 09:50:25 crc kubenswrapper[4730]: I0930 09:50:25.436658 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:25 crc kubenswrapper[4730]: I0930 09:50:25.436697 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:25 crc kubenswrapper[4730]: I0930 09:50:25.436710 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:25 crc kubenswrapper[4730]: I0930 09:50:25.436727 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:25 crc kubenswrapper[4730]: I0930 09:50:25.436771 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:25Z","lastTransitionTime":"2025-09-30T09:50:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:25 crc kubenswrapper[4730]: I0930 09:50:25.539985 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:25 crc kubenswrapper[4730]: I0930 09:50:25.540058 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:25 crc kubenswrapper[4730]: I0930 09:50:25.540076 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:25 crc kubenswrapper[4730]: I0930 09:50:25.540104 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:25 crc kubenswrapper[4730]: I0930 09:50:25.540122 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:25Z","lastTransitionTime":"2025-09-30T09:50:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:25 crc kubenswrapper[4730]: I0930 09:50:25.651766 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:25 crc kubenswrapper[4730]: I0930 09:50:25.651827 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:25 crc kubenswrapper[4730]: I0930 09:50:25.651841 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:25 crc kubenswrapper[4730]: I0930 09:50:25.651872 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:25 crc kubenswrapper[4730]: I0930 09:50:25.651887 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:25Z","lastTransitionTime":"2025-09-30T09:50:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:25 crc kubenswrapper[4730]: I0930 09:50:25.755162 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:25 crc kubenswrapper[4730]: I0930 09:50:25.755206 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:25 crc kubenswrapper[4730]: I0930 09:50:25.755215 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:25 crc kubenswrapper[4730]: I0930 09:50:25.755233 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:25 crc kubenswrapper[4730]: I0930 09:50:25.755245 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:25Z","lastTransitionTime":"2025-09-30T09:50:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:25 crc kubenswrapper[4730]: I0930 09:50:25.857733 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:25 crc kubenswrapper[4730]: I0930 09:50:25.857764 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:25 crc kubenswrapper[4730]: I0930 09:50:25.857774 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:25 crc kubenswrapper[4730]: I0930 09:50:25.857788 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:25 crc kubenswrapper[4730]: I0930 09:50:25.857797 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:25Z","lastTransitionTime":"2025-09-30T09:50:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:25 crc kubenswrapper[4730]: I0930 09:50:25.960889 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:25 crc kubenswrapper[4730]: I0930 09:50:25.961282 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:25 crc kubenswrapper[4730]: I0930 09:50:25.961406 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:25 crc kubenswrapper[4730]: I0930 09:50:25.961509 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:25 crc kubenswrapper[4730]: I0930 09:50:25.961602 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:25Z","lastTransitionTime":"2025-09-30T09:50:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:26 crc kubenswrapper[4730]: I0930 09:50:26.064464 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:26 crc kubenswrapper[4730]: I0930 09:50:26.064774 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:26 crc kubenswrapper[4730]: I0930 09:50:26.064869 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:26 crc kubenswrapper[4730]: I0930 09:50:26.064964 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:26 crc kubenswrapper[4730]: I0930 09:50:26.065048 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:26Z","lastTransitionTime":"2025-09-30T09:50:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:26 crc kubenswrapper[4730]: I0930 09:50:26.168033 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:26 crc kubenswrapper[4730]: I0930 09:50:26.168355 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:26 crc kubenswrapper[4730]: I0930 09:50:26.168444 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:26 crc kubenswrapper[4730]: I0930 09:50:26.168539 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:26 crc kubenswrapper[4730]: I0930 09:50:26.168641 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:26Z","lastTransitionTime":"2025-09-30T09:50:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:26 crc kubenswrapper[4730]: I0930 09:50:26.271697 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:26 crc kubenswrapper[4730]: I0930 09:50:26.271757 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:26 crc kubenswrapper[4730]: I0930 09:50:26.271771 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:26 crc kubenswrapper[4730]: I0930 09:50:26.271832 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:26 crc kubenswrapper[4730]: I0930 09:50:26.271849 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:26Z","lastTransitionTime":"2025-09-30T09:50:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:26 crc kubenswrapper[4730]: I0930 09:50:26.374841 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:26 crc kubenswrapper[4730]: I0930 09:50:26.374909 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:26 crc kubenswrapper[4730]: I0930 09:50:26.374923 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:26 crc kubenswrapper[4730]: I0930 09:50:26.374944 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:26 crc kubenswrapper[4730]: I0930 09:50:26.374957 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:26Z","lastTransitionTime":"2025-09-30T09:50:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:26 crc kubenswrapper[4730]: I0930 09:50:26.380459 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 09:50:26 crc kubenswrapper[4730]: I0930 09:50:26.380476 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 09:50:26 crc kubenswrapper[4730]: E0930 09:50:26.380596 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 09:50:26 crc kubenswrapper[4730]: I0930 09:50:26.380678 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 09:50:26 crc kubenswrapper[4730]: E0930 09:50:26.380819 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 09:50:26 crc kubenswrapper[4730]: E0930 09:50:26.380929 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 09:50:26 crc kubenswrapper[4730]: I0930 09:50:26.397455 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d4482712-8a6c-4e35-8e85-0777588ab827\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d84dba8205ee9f4469b0277711b38ff82a1068b3f1cd951ea4a4eefab51ba88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://58b48296b6764c18fc1b5163c1ef503b124b97f11f287fbbbc5b1144ecd06bbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\
\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://461de41b45c19fbaf8e606cd30458759972ca804bac7e4cd5495efd8f30280fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0abf086d2b6dabfe2b74ed6b31b94bbd61222698aeb420775118dcca92243bbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0abf086d2b6dabfe2b74ed6b31b94bbd61222698aeb420775118dcca92243bbd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:17Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:16Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:26Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:26 crc kubenswrapper[4730]: I0930 09:50:26.410115 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:26Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:26 crc kubenswrapper[4730]: I0930 09:50:26.421668 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://96ab57c0ead54e1fe3bbf9a7fe3fc8375d1e5ebd29840f01bb2022de2f3bfdc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://373aa4d8084c958728b4a3f16056068fb328a59e55a5f50785647d78fac0e424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imag
eID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:26Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:26 crc kubenswrapper[4730]: I0930 09:50:26.436486 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-p4xvk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9871bed2-69f9-44f1-ab80-f8b4b9241e73\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19b882d14f4eaee6c6ad8cdd30c6e1e63eb837c7bc2dd31974c37ffbc3307186\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84aa64a838e41f5db6010538528a9d8d9221b5fe859e1afec9d9d3c71ef90eb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\
":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://84aa64a838e41f5db6010538528a9d8d9221b5fe859e1afec9d9d3c71ef90eb7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://618950d5ff8ec1ef34dc0f43e73a265aefbeb97f69a25226fb2007b4d28249a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://618950d5ff8ec1ef34dc0f43e73a265aefbeb97f69a25226fb2007b4d28249a7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://849eb24b3a7e62a7667c303069953f1391cdebf8f2888dc7d3967667c31c2a3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://849eb24b3a7e62a7667c303069953f1391cdebf8f2888dc7d3967667c31c2a3e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\
\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://685162f0c76d93c0aa78dc2c5c0ee794627f665afa60af9d9d0ae3ba8f5f7b59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://685162f0c76d93c0aa78dc2c5c0ee794627f665afa60af9d9d0ae3ba8f5f7b59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c92fbb212b1ad8be488e639f7f88263b3d96ab0320da532d908b3c9df59ef2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c92fbb212b1ad8be488e639f7f88263b3d96ab0320da532d908b3c9df59ef2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://840f2a769ccdfba8933c685d07c325cf84d38bccf1d317c3b664f298aa52d878\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://840f2a769ccdfba8933c685d07c325cf84d38bccf1d317c3b664f298aa52d878\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:43
Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-p4xvk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:26Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:26 crc kubenswrapper[4730]: I0930 09:50:26.449104 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"17c9fe87-51d8-4e94-b0aa-10f3112788c6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a687cd54db44118582a4e1b57fc64241860ae1cfdc202ffd5dccc517967e21f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://95fa75e3d43d0b4c4f3ddcb59fc3bd9fb3a75647e5fedd92463bdfaf6c3db722\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mou
ntPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://758b57b21f203bcb20f4e3a3b20c6de8696ba56b4e0eff6a31faaf6879a3e7b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba42c83f0075e3d0f10f24307dd574c42a505351503a8794bbcee9e986ec80e3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:16Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:26Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:26 crc kubenswrapper[4730]: I0930 09:50:26.462772 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1917c40c0ff4b4b6a2389c465a4887bad0db0b5b86a8715e0c790b3e0acfb973\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:26Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:26 crc kubenswrapper[4730]: I0930 09:50:26.474335 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e32477c6ee071d9e994825da575711cc75f177c998dd5fd170be07551b40e049\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:26Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:26 crc kubenswrapper[4730]: I0930 09:50:26.477081 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:26 crc kubenswrapper[4730]: I0930 09:50:26.477216 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:26 crc kubenswrapper[4730]: I0930 09:50:26.477312 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:26 crc kubenswrapper[4730]: I0930 09:50:26.477393 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:26 crc kubenswrapper[4730]: I0930 09:50:26.477489 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:26Z","lastTransitionTime":"2025-09-30T09:50:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:26 crc kubenswrapper[4730]: I0930 09:50:26.486350 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"95bd4436-8399-478d-9552-c9ba5ae8f327\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b7761b460113a85528a11a7475d5245ce3009a0dd413a8deec822c6c0eac07b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9d975\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://defac85c6bf34a3ea262b1ad293516b72b27c3f3e328f3f970694bb978bf90a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9d975\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d4zf9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:26Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:26 crc kubenswrapper[4730]: I0930 09:50:26.496274 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-nw55k" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c0a99445-b60e-4c47-b4ae-00b9983d8a15\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b566e571569572866ddcfc76684ec3f1fdabc0f890aa5fd8588d136b890e88ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qdpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:40Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nw55k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:26Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:26 crc kubenswrapper[4730]: I0930 09:50:26.507283 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wh45q" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c534fd9f-3767-4be6-a84e-45260fe2042f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c1daa82f1b7e2909d26e662848abe7adaa29361f528ea2efac340bfd685bb155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n2mj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6b629ad4121cddae85f73b40e36013ebfd8d41e9f990dba444bdd2e51d8f9d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n2mj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:50Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wh45q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:26Z is after 2025-08-24T17:21:41Z" Sep 30 
09:50:26 crc kubenswrapper[4730]: I0930 09:50:26.518730 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"14260296-7de2-4c79-8afc-9472b7132a27\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac2d1a0a531df208cd612ed7da358209e05fb5a21f658ca2d1ab62720db094e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9d7ccc34d02e190d685a52bfa797834b585dfe919856b9af92419fa4e6e3c8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e42db724f492a056ad52bf50f43c9603664c4307a8f0c01d2d83d36f644c134c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\
\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5305f42ae8fc9005ec4726813a881f7b86ab167dd060238bbde4c9af5eb2c010\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://009056b5d735b1cd232be92cb1bfae18ea1e4e6f5b06185d424bba38ef57ade7\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T09:49:30Z\\\",\\\"message\\\":\\\"W0930 09:49:19.606005 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0930 09:49:19.606415 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759225759 cert, and key in /tmp/serving-cert-1864340106/serving-signer.crt, /tmp/serving-cert-1864340106/serving-signer.key\\\\nI0930 09:49:19.940795 1 observer_polling.go:159] Starting file observer\\\\nW0930 09:49:19.943973 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0930 09:49:19.944252 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 09:49:19.946386 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1864340106/tls.crt::/tmp/serving-cert-1864340106/tls.key\\\\\\\"\\\\nF0930 09:49:30.331930 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2da24b18038fc740ead0fe99d18f91c87014548941763cf0ef15284ed5eb228\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://674fc8de60d8bca93dc0d54d5e12664780d58b8b4c5eaf314e923c2f0644e11b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://674fc8de60d8bca93dc0d54d5e12664780d58b8b4c5eaf314e923c2f0644e11b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:16Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:26Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:26 crc kubenswrapper[4730]: I0930 09:50:26.527502 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s64nf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a7e6b85-ac68-4da9-b7eb-b5a936f639df\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a77da47c8d44b3aae7f4e27fcc4df190549c2d881e0969dbe3b8951103477ba3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6rknv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s64nf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:26Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:26 crc kubenswrapper[4730]: I0930 09:50:26.541675 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:26Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:26 crc kubenswrapper[4730]: I0930 09:50:26.556919 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:26Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:26 crc kubenswrapper[4730]: I0930 09:50:26.569336 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-t2frc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"98a6f8df-1ac8-4652-8074-90cb180311ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca62256374b9da6cb7b82db90ee6d121b072440c000bc7dfb04e763224cf666d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\
"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-txcfr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-t2frc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:26Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:26 crc kubenswrapper[4730]: I0930 09:50:26.580707 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:26 crc kubenswrapper[4730]: I0930 09:50:26.581055 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:26 crc kubenswrapper[4730]: I0930 09:50:26.581178 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:26 crc kubenswrapper[4730]: I0930 09:50:26.581314 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:26 crc kubenswrapper[4730]: I0930 09:50:26.581448 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:26Z","lastTransitionTime":"2025-09-30T09:50:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:26 crc kubenswrapper[4730]: I0930 09:50:26.589081 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"823c4c28-801d-421e-b15f-02a17e300753\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://566f950ac85f82874c332e01386898784e3b3e37ee3c422f8b73f6e732cd4541\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f988662b16857e74e7c88eb9e69279e7b5807aa2831cdd6ed9a7cab0a6b2494\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://7d99f5927ded358669fa88d47fb6a8d8bad2808bfdd07920056227828e478eda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fbe6a18bf72a902960dfc551de099c3956696ca5ac78ff5afc30a93768be323\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd7abb6daba8cf38e3d8cc62ab526acb2fe714e07477eb5d1ecf77e145cf60dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d525f8baccabf7bc3b8067dbbc59d76851878ccc48a9fbe61c637b6fe57e01d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://920e864b3b47f25ccbf4d23e865008517a888ed00df9d5e474a1d43ceac0faf1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://920e864b3b47f25ccbf4d23e865008517a888ed00df9d5e474a1d43ceac0faf1\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T09:50:07Z\\\",\\\"message\\\":\\\"/multus-additional-cni-plugins-p4xvk\\\\nF0930 09:50:07.486769 6349 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:07Z is after 2025-08-24T17:21:41Z]\\\\nI0930 09:50:07.486778 6349 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-console/networking-console-plugin-85b44fc459-gdk6g\\\\nI0930 09:50:07.486723 6349 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-machine-config-operator/machine-config-operator]} name:Service_openshift-machine-config-operator/machine-config-operator_TCP_cluster o\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T09:50:06Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-c5vmh_openshift-ovn-kubernetes(823c4c28-801d-421e-b15f-02a17e300753)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59538b49ec4037e4bd1ccb035a0263f32db2504037aca2ab484dc9323f80b5d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4fcd3f6fd645e486e741ae4ddea7a8669c849d729860c651131da25638a393f\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4fcd3f6fd645e486e741ae4ddea7a8669c849d729860c651131da25638a393f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-c5vmh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:26Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:26 crc kubenswrapper[4730]: I0930 09:50:26.600491 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-dqqrb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be86a67e-c663-4551-9ecf-a8c2a9801cd7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9sm8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9sm8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:51Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-dqqrb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:26Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:26 crc kubenswrapper[4730]: I0930 09:50:26.684695 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:26 crc kubenswrapper[4730]: I0930 09:50:26.684996 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:26 crc kubenswrapper[4730]: I0930 09:50:26.685066 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:26 crc kubenswrapper[4730]: I0930 09:50:26.685144 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:26 crc kubenswrapper[4730]: I0930 09:50:26.685251 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:26Z","lastTransitionTime":"2025-09-30T09:50:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:26 crc kubenswrapper[4730]: I0930 09:50:26.787665 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:26 crc kubenswrapper[4730]: I0930 09:50:26.787755 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:26 crc kubenswrapper[4730]: I0930 09:50:26.787768 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:26 crc kubenswrapper[4730]: I0930 09:50:26.787788 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:26 crc kubenswrapper[4730]: I0930 09:50:26.787800 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:26Z","lastTransitionTime":"2025-09-30T09:50:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:26 crc kubenswrapper[4730]: I0930 09:50:26.817032 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-t2frc_98a6f8df-1ac8-4652-8074-90cb180311ad/kube-multus/0.log" Sep 30 09:50:26 crc kubenswrapper[4730]: I0930 09:50:26.817367 4730 generic.go:334] "Generic (PLEG): container finished" podID="98a6f8df-1ac8-4652-8074-90cb180311ad" containerID="ca62256374b9da6cb7b82db90ee6d121b072440c000bc7dfb04e763224cf666d" exitCode=1 Sep 30 09:50:26 crc kubenswrapper[4730]: I0930 09:50:26.817465 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-t2frc" event={"ID":"98a6f8df-1ac8-4652-8074-90cb180311ad","Type":"ContainerDied","Data":"ca62256374b9da6cb7b82db90ee6d121b072440c000bc7dfb04e763224cf666d"} Sep 30 09:50:26 crc kubenswrapper[4730]: I0930 09:50:26.817908 4730 scope.go:117] "RemoveContainer" containerID="ca62256374b9da6cb7b82db90ee6d121b072440c000bc7dfb04e763224cf666d" Sep 30 09:50:26 crc kubenswrapper[4730]: I0930 09:50:26.830682 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-nw55k" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c0a99445-b60e-4c47-b4ae-00b9983d8a15\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b566e571569572866ddcfc76684ec3f1fdabc0f890aa5fd8588d136b890e88ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qdpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:40Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nw55k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:26Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:26 crc kubenswrapper[4730]: I0930 09:50:26.842159 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wh45q" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c534fd9f-3767-4be6-a84e-45260fe2042f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c1daa82f1b7e2909d26e662848abe7adaa29361f528ea2efac340bfd685bb155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n2mj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6b629ad4121cddae85f73b40e36013ebfd8d41e9f990dba444bdd2e51d8f9d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n2mj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:50Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wh45q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:26Z is after 2025-08-24T17:21:41Z" Sep 30 
09:50:26 crc kubenswrapper[4730]: I0930 09:50:26.858175 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"14260296-7de2-4c79-8afc-9472b7132a27\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac2d1a0a531df208cd612ed7da358209e05fb5a21f658ca2d1ab62720db094e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9d7ccc34d02e190d685a52bfa797834b585dfe919856b9af92419fa4e6e3c8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e42db724f492a056ad52bf50f43c9603664c4307a8f0c01d2d83d36f644c134c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\
\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5305f42ae8fc9005ec4726813a881f7b86ab167dd060238bbde4c9af5eb2c010\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://009056b5d735b1cd232be92cb1bfae18ea1e4e6f5b06185d424bba38ef57ade7\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T09:49:30Z\\\",\\\"message\\\":\\\"W0930 09:49:19.606005 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0930 09:49:19.606415 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759225759 cert, and key in /tmp/serving-cert-1864340106/serving-signer.crt, /tmp/serving-cert-1864340106/serving-signer.key\\\\nI0930 09:49:19.940795 1 observer_polling.go:159] Starting file observer\\\\nW0930 09:49:19.943973 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0930 09:49:19.944252 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 09:49:19.946386 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1864340106/tls.crt::/tmp/serving-cert-1864340106/tls.key\\\\\\\"\\\\nF0930 09:49:30.331930 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2da24b18038fc740ead0fe99d18f91c87014548941763cf0ef15284ed5eb228\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://674fc8de60d8bca93dc0d54d5e12664780d58b8b4c5eaf314e923c2f0644e11b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://674fc8de60d8bca93dc0d54d5e12664780d58b8b4c5eaf314e923c2f0644e11b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:16Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:26Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:26 crc kubenswrapper[4730]: I0930 09:50:26.874688 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"17c9fe87-51d8-4e94-b0aa-10f3112788c6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a687cd54db44118582a4e1b57fc64241860ae1cfdc202ffd5dccc517967e21f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://95fa75e3d43d0b4c4f3ddcb59fc3bd9fb3a75647e5fedd92463bdfaf6c3db722\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://758b57b21f203bcb20f4e3a3b20c6de8696ba56b4e0eff6a31faaf6879a3e7b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba42c83f0075e3d0f10f24307dd574c42a505351503a8794bbcee9e986ec80e3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:16Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:26Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:26 crc kubenswrapper[4730]: I0930 09:50:26.889526 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1917c40c0ff4b4b6a2389c465a4887bad0db0b5b86a8715e0c790b3e0acfb973\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:26Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:26 crc kubenswrapper[4730]: I0930 09:50:26.890532 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:26 crc kubenswrapper[4730]: I0930 09:50:26.890570 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:26 crc kubenswrapper[4730]: I0930 09:50:26.890583 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:26 crc kubenswrapper[4730]: I0930 09:50:26.890599 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:26 crc kubenswrapper[4730]: I0930 09:50:26.890626 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:26Z","lastTransitionTime":"2025-09-30T09:50:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:26 crc kubenswrapper[4730]: I0930 09:50:26.904253 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e32477c6ee071d9e994825da575711cc75f177c998dd5fd170be07551b40e049\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: 
current time 2025-09-30T09:50:26Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:26 crc kubenswrapper[4730]: I0930 09:50:26.916550 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"95bd4436-8399-478d-9552-c9ba5ae8f327\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b7761b460113a85528a11a7475d5245ce3009a0dd413a8deec822c6c0eac07b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9d975\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://defac85c6bf34a3ea262b1ad293516b72b27c3f3e328f3f970694bb978bf90a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9d975\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d4zf9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:26Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:26 crc kubenswrapper[4730]: I0930 09:50:26.929942 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s64nf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a7e6b85-ac68-4da9-b7eb-b5a936f639df\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a77da47c8d44b3aae7f4e27fcc4df190549c2d881e0969dbe3b8951103477ba3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6rknv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s64nf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:26Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:26 crc kubenswrapper[4730]: I0930 09:50:26.944575 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-dqqrb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"be86a67e-c663-4551-9ecf-a8c2a9801cd7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9sm8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9sm8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:51Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-dqqrb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:26Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:26 crc kubenswrapper[4730]: I0930 09:50:26.957655 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:26Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:26 crc kubenswrapper[4730]: I0930 09:50:26.968982 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:26Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:26 crc kubenswrapper[4730]: I0930 09:50:26.981658 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-t2frc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"98a6f8df-1ac8-4652-8074-90cb180311ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca62256374b9da6cb7b82db90ee6d121b072440c000bc7dfb04e763224cf666d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca62256374b9da6cb7b82db90ee6d121b072440c000bc7dfb04e763224cf666d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T09:50:26Z\\\",\\\"message\\\":\\\"2025-09-30T09:49:40+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_9e508620-01ca-40f6-9692-76b42e358278\\\\n2025-09-30T09:49:40+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_9e508620-01ca-40f6-9692-76b42e358278 to /host/opt/cni/bin/\\\\n2025-09-30T09:49:41Z [verbose] multus-daemon started\\\\n2025-09-30T09:49:41Z [verbose] Readiness Indicator file check\\\\n2025-09-30T09:50:26Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-txcfr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-t2frc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:26Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:26 crc kubenswrapper[4730]: I0930 09:50:26.993391 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:26 crc kubenswrapper[4730]: I0930 09:50:26.993436 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:26 crc kubenswrapper[4730]: I0930 09:50:26.993445 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:26 crc kubenswrapper[4730]: I0930 09:50:26.993463 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:26 crc kubenswrapper[4730]: I0930 09:50:26.993473 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:26Z","lastTransitionTime":"2025-09-30T09:50:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:27 crc kubenswrapper[4730]: I0930 09:50:26.999866 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"823c4c28-801d-421e-b15f-02a17e300753\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://566f950ac85f82874c332e01386898784e3b3e37ee3c422f8b73f6e732cd4541\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f988662b16857e74e7c88eb9e69279e7b5807aa2831cdd6ed9a7cab0a6b2494\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d99f5927ded358669fa88d47fb6a8d8bad2808bfdd07920056227828e478eda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fbe6a18bf72a902960dfc551de099c3956696ca5ac78ff5afc30a93768be323\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd7abb6daba8cf38e3d8cc62ab526acb2fe714e07477eb5d1ecf77e145cf60dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d525f8baccabf7bc3b8067dbbc59d76851878ccc48a9fbe61c637b6fe57e01d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://920e864b3b47f25ccbf4d23e865008517a888ed0
0df9d5e474a1d43ceac0faf1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://920e864b3b47f25ccbf4d23e865008517a888ed00df9d5e474a1d43ceac0faf1\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T09:50:07Z\\\",\\\"message\\\":\\\"/multus-additional-cni-plugins-p4xvk\\\\nF0930 09:50:07.486769 6349 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:07Z is after 2025-08-24T17:21:41Z]\\\\nI0930 09:50:07.486778 6349 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-console/networking-console-plugin-85b44fc459-gdk6g\\\\nI0930 09:50:07.486723 6349 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-machine-config-operator/machine-config-operator]} name:Service_openshift-machine-config-operator/machine-config-operator_TCP_cluster o\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T09:50:06Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-c5vmh_openshift-ovn-kubernetes(823c4c28-801d-421e-b15f-02a17e300753)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59538b49ec4037e4bd1ccb035a0263f32db2504037aca2ab484dc9323f80b5d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4fcd3f6fd645e486e741ae4ddea7a8669c849d729860c651131da25638a393f\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4fcd3f6fd645e486e741ae4ddea7a8669c849d729860c651131da25638a393f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-c5vmh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:26Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:27 crc kubenswrapper[4730]: I0930 09:50:27.012797 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d4482712-8a6c-4e35-8e85-0777588ab827\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d84dba8205ee9f4469b0277711b38ff82a1068b3f1cd951ea4a4eefab51ba88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://58b48296b6764c18fc1b5163c1ef503b124b97f11f287fbbbc5b1144ecd06bbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c
97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://461de41b45c19fbaf8e606cd30458759972ca804bac7e4cd5495efd8f30280fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0abf086d2b6dabfe2b74ed6b31b94bbd61222698aeb420775118dcca92243bbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0abf086d2b6dabfe2b74ed6b31b94bbd61222698aeb420775118dcca92243bbd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:17Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:16Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:27Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:27 crc kubenswrapper[4730]: I0930 09:50:27.026346 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:27Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:27 crc kubenswrapper[4730]: I0930 09:50:27.047045 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://96ab57c0ead54e1fe3bbf9a7fe3fc8375d1e5ebd29840f01bb2022de2f3bfdc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://373aa4d8084c958728b4a3f16056068fb328a59e55a5f50785647d78fac0e424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:27Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:27 crc kubenswrapper[4730]: I0930 09:50:27.084237 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-p4xvk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9871bed2-69f9-44f1-ab80-f8b4b9241e73\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19b882d14f4eaee6c6ad8cdd30c6e1e63eb837c7bc2dd31974c37ffbc3307186\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84aa64a838e41f5db6010538528a9d8d9221b5fe859e1afec9d9d3c71ef90eb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://84aa64a838e41f5db6010538528a9d8d9221b5fe859e1afec9d9d3c71ef90eb7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://618950d5ff8ec1ef34dc0f43e73a265aefbeb97f69a25226fb2007b4d28249a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://618950d5ff8ec1ef34dc0f43e73a265aefbeb97f69a25226fb2007b4d28249a7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://849eb24b3a7e62a7667c303069953f1391cdebf8f2888dc7d3967667c31c2a3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://849eb24b3a7e62a7667c303069953f1391cdebf8f2888dc7d3967667c31c2a3e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://685162f0c76d93c0aa78dc2c5c0ee794627f665afa60af9d9d0ae3ba8f5f7b59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://685162f0c76d93c0aa78dc2c5c0ee794627f665afa60af9d9d0ae3ba8f5f7b59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c92fbb212b1ad8be488e639f7f88263b3d96ab0320da532d908b3c9df59ef2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c92fbb212b1ad8be488e639f7f88263b3d96ab0320da532d908b3c9df59ef2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://840f2a769ccdfba8933c685d07c325cf84d38bccf1d317c3b664f298aa52d878\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://840f2a769ccdfba8933c685d07c325cf84d38bccf1d317c3b664f298aa52d878\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-p4xvk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:27Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:27 crc kubenswrapper[4730]: I0930 09:50:27.095412 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:27 crc kubenswrapper[4730]: I0930 09:50:27.095450 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:27 crc 
kubenswrapper[4730]: I0930 09:50:27.095463 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 09:50:27 crc kubenswrapper[4730]: I0930 09:50:27.095480 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 09:50:27 crc kubenswrapper[4730]: I0930 09:50:27.095490 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:27Z","lastTransitionTime":"2025-09-30T09:50:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 09:50:27 crc kubenswrapper[4730]: I0930 09:50:27.197630 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 09:50:27 crc kubenswrapper[4730]: I0930 09:50:27.197893 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 09:50:27 crc kubenswrapper[4730]: I0930 09:50:27.198007 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 09:50:27 crc kubenswrapper[4730]: I0930 09:50:27.198092 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 09:50:27 crc kubenswrapper[4730]: I0930 09:50:27.198171 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:27Z","lastTransitionTime":"2025-09-30T09:50:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 09:50:27 crc kubenswrapper[4730]: I0930 09:50:27.300529 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 09:50:27 crc kubenswrapper[4730]: I0930 09:50:27.300861 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 09:50:27 crc kubenswrapper[4730]: I0930 09:50:27.300958 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 09:50:27 crc kubenswrapper[4730]: I0930 09:50:27.301065 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 09:50:27 crc kubenswrapper[4730]: I0930 09:50:27.301134 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:27Z","lastTransitionTime":"2025-09-30T09:50:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 09:50:27 crc kubenswrapper[4730]: I0930 09:50:27.380386 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-dqqrb"
Sep 30 09:50:27 crc kubenswrapper[4730]: E0930 09:50:27.380887 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-dqqrb" podUID="be86a67e-c663-4551-9ecf-a8c2a9801cd7"
Sep 30 09:50:27 crc kubenswrapper[4730]: I0930 09:50:27.404139 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 09:50:27 crc kubenswrapper[4730]: I0930 09:50:27.404214 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 09:50:27 crc kubenswrapper[4730]: I0930 09:50:27.404236 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 09:50:27 crc kubenswrapper[4730]: I0930 09:50:27.404270 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 09:50:27 crc kubenswrapper[4730]: I0930 09:50:27.404294 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:27Z","lastTransitionTime":"2025-09-30T09:50:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 09:50:27 crc kubenswrapper[4730]: I0930 09:50:27.507594 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 09:50:27 crc kubenswrapper[4730]: I0930 09:50:27.507661 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 09:50:27 crc kubenswrapper[4730]: I0930 09:50:27.507671 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 09:50:27 crc kubenswrapper[4730]: I0930 09:50:27.507688 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 09:50:27 crc kubenswrapper[4730]: I0930 09:50:27.507699 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:27Z","lastTransitionTime":"2025-09-30T09:50:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 09:50:27 crc kubenswrapper[4730]: I0930 09:50:27.609768 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 09:50:27 crc kubenswrapper[4730]: I0930 09:50:27.609820 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 09:50:27 crc kubenswrapper[4730]: I0930 09:50:27.609830 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 09:50:27 crc kubenswrapper[4730]: I0930 09:50:27.609847 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 09:50:27 crc kubenswrapper[4730]: I0930 09:50:27.609857 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:27Z","lastTransitionTime":"2025-09-30T09:50:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 09:50:27 crc kubenswrapper[4730]: I0930 09:50:27.712446 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 09:50:27 crc kubenswrapper[4730]: I0930 09:50:27.712782 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 09:50:27 crc kubenswrapper[4730]: I0930 09:50:27.712860 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 09:50:27 crc kubenswrapper[4730]: I0930 09:50:27.712949 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 09:50:27 crc kubenswrapper[4730]: I0930 09:50:27.713041 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:27Z","lastTransitionTime":"2025-09-30T09:50:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 09:50:27 crc kubenswrapper[4730]: I0930 09:50:27.815943 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 09:50:27 crc kubenswrapper[4730]: I0930 09:50:27.816205 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 09:50:27 crc kubenswrapper[4730]: I0930 09:50:27.816299 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 09:50:27 crc kubenswrapper[4730]: I0930 09:50:27.816377 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 09:50:27 crc kubenswrapper[4730]: I0930 09:50:27.816456 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:27Z","lastTransitionTime":"2025-09-30T09:50:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:27 crc kubenswrapper[4730]: I0930 09:50:27.822837 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-t2frc_98a6f8df-1ac8-4652-8074-90cb180311ad/kube-multus/0.log" Sep 30 09:50:27 crc kubenswrapper[4730]: I0930 09:50:27.822912 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-t2frc" event={"ID":"98a6f8df-1ac8-4652-8074-90cb180311ad","Type":"ContainerStarted","Data":"9ed00cdb0dca4ffa70594334507af7834b99fd93be39a245a3b569f39154c2a1"} Sep 30 09:50:27 crc kubenswrapper[4730]: I0930 09:50:27.838056 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"95bd4436-8399-478d-9552-c9ba5ae8f327\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b7761b460113a85528a11a7475d5245ce3009a0dd413a8deec822c6c0eac07b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9d975\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://defac85c6bf34a3ea262b1ad293516b72b27c3f3e328f3f970694bb978bf90a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9d975\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Dis
abled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d4zf9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:27Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:27 crc kubenswrapper[4730]: I0930 09:50:27.847652 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-nw55k" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c0a99445-b60e-4c47-b4ae-00b9983d8a15\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b566e571569572866ddcfc76684ec3f1fdabc0f890aa5fd8588d136b890e88ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qdpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:40Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nw55k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:27Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:27 crc kubenswrapper[4730]: I0930 09:50:27.858030 4730 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wh45q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c534fd9f-3767-4be6-a84e-45260fe2042f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c1daa82f1b7e2909d26e662848abe7adaa29361f528ea2efac340bfd685bb155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n2mj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6b629ad4121cddae85f73b40e36013ebfd8d41e9f990dba444bdd2e51d8f9d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n2mj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:50Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wh45q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-09-30T09:50:27Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:27 crc kubenswrapper[4730]: I0930 09:50:27.870084 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"14260296-7de2-4c79-8afc-9472b7132a27\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac2d1a0a531df208cd612ed7da358209e05fb5a21f658ca2d1ab62720db094e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9d7ccc34d02e190d685a52bfa797834b585dfe919856b9af92419fa4e6e3c8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e42db724f492a056ad52bf50f43c9603664c4307a8f0c01d2d83d36f644c134c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name
\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5305f42ae8fc9005ec4726813a881f7b86ab167dd060238bbde4c9af5eb2c010\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://009056b5d735b1cd232be92cb1bfae18ea1e4e6f5b06185d424bba38ef57ade7\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T09:49:30Z\\\",\\\"message\\\":\\\"W0930 09:49:19.606005 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0930 09:49:19.606415 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759225759 cert, and key in /tmp/serving-cert-1864340106/serving-signer.crt, /tmp/serving-cert-1864340106/serving-signer.key\\\\nI0930 09:49:19.940795 1 observer_polling.go:159] Starting file observer\\\\nW0930 09:49:19.943973 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0930 09:49:19.944252 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 09:49:19.946386 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1864340106/tls.crt::/tmp/serving-cert-1864340106/tls.key\\\\\\\"\\\\nF0930 09:49:30.331930 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2da24b18038fc740ead0fe99d18f91c87014548941763cf0ef15284ed5eb228\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://674fc8de60d8bca93dc0d54d5e12664780d58b8b4c5eaf314e923c2f0644e11b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://674fc8de60d8bca93dc0d54d5e12664780d58b8b4c5eaf314e923c2f0644e11b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:16Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:27Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:27 crc kubenswrapper[4730]: I0930 09:50:27.881606 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"17c9fe87-51d8-4e94-b0aa-10f3112788c6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a687cd54db44118582a4e1b57fc64241860ae1cfdc202ffd5dccc517967e21f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://95fa75e3d43d0b4c4f3ddcb59fc3bd9fb3a75647e5fedd92463bdfaf6c3db722\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://758b57b21f203bcb20f4e3a3b20c6de8696ba56b4e0eff6a31faaf6879a3e7b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba42c83f0075e3d0f10f24307dd574c42a505351503a8794bbcee9e986ec80e3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:16Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:27Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:27 crc kubenswrapper[4730]: I0930 09:50:27.893908 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1917c40c0ff4b4b6a2389c465a4887bad0db0b5b86a8715e0c790b3e0acfb973\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:27Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:27 crc kubenswrapper[4730]: I0930 09:50:27.906032 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e32477c6ee071d9e994825da575711cc75f177c998dd5fd170be07551b40e049\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:27Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:27 crc kubenswrapper[4730]: I0930 09:50:27.916843 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s64nf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a7e6b85-ac68-4da9-b7eb-b5a936f639df\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a77da47c8d44b3aae7f4e27fcc4df190549c2d881e0969dbe3b8951103477ba3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6rknv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s64nf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:27Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:27 crc kubenswrapper[4730]: I0930 09:50:27.918498 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:27 crc kubenswrapper[4730]: I0930 09:50:27.918527 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:27 crc kubenswrapper[4730]: I0930 09:50:27.918538 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:27 crc kubenswrapper[4730]: I0930 09:50:27.918565 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:27 crc kubenswrapper[4730]: I0930 09:50:27.918576 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:27Z","lastTransitionTime":"2025-09-30T09:50:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:27 crc kubenswrapper[4730]: I0930 09:50:27.936972 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"823c4c28-801d-421e-b15f-02a17e300753\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://566f950ac85f82874c332e01386898784e3b3e37ee3c422f8b73f6e732cd4541\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f988662b16857e74e7c88eb9e69279e7b5807aa2831cdd6ed9a7cab0a6b2494\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d99f5927ded358669fa88d47fb6a8d8bad2808bfdd07920056227828e478eda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fbe6a18bf72a902960dfc551de099c3956696ca5ac78ff5afc30a93768be323\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd7abb6daba8cf38e3d8cc62ab526acb2fe714e07477eb5d1ecf77e145cf60dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d525f8baccabf7bc3b8067dbbc59d76851878ccc48a9fbe61c637b6fe57e01d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d7732574532
65a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://920e864b3b47f25ccbf4d23e865008517a888ed00df9d5e474a1d43ceac0faf1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://920e864b3b47f25ccbf4d23e865008517a888ed00df9d5e474a1d43ceac0faf1\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T09:50:07Z\\\",\\\"message\\\":\\\"/multus-additional-cni-plugins-p4xvk\\\\nF0930 09:50:07.486769 6349 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:07Z is after 2025-08-24T17:21:41Z]\\\\nI0930 09:50:07.486778 6349 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-console/networking-console-plugin-85b44fc459-gdk6g\\\\nI0930 09:50:07.486723 6349 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-machine-config-operator/machine-config-operator]} name:Service_openshift-machine-config-operator/machine-config-operator_TCP_cluster o\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T09:50:06Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed 
container=ovnkube-controller pod=ovnkube-node-c5vmh_openshift-ovn-kubernetes(823c4c28-801d-421e-b15f-02a17e300753)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59538b49ec4037e4bd1ccb035a0263f32db2504037aca2ab484dc9323f80b5d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4fcd3f6fd645e486e741ae4ddea7a8669c849d729860c651131da25638a393f\\\",\\\"image\\\":\
\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4fcd3f6fd645e486e741ae4ddea7a8669c849d729860c651131da25638a393f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-c5vmh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:27Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:27 crc kubenswrapper[4730]: I0930 09:50:27.949138 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-dqqrb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be86a67e-c663-4551-9ecf-a8c2a9801cd7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9sm8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9sm8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:51Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-dqqrb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:27Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:27 crc kubenswrapper[4730]: I0930 09:50:27.962862 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:27Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:27 crc kubenswrapper[4730]: I0930 09:50:27.976201 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:27Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:27 crc kubenswrapper[4730]: I0930 09:50:27.990380 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-t2frc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"98a6f8df-1ac8-4652-8074-90cb180311ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ed00cdb0dca4ffa70594334507af7834b99fd93be39a245a3b569f39154c2a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca62256374b9da6cb7b82db90ee6d121b072440c000bc7dfb04e763224cf666d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T09:50:26Z\\\",\\\"message\\\":\\\"2025-09-30T09:49:40+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_9e508620-01ca-40f6-9692-76b42e358278\\\\n2025-09-30T09:49:40+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_9e508620-01ca-40f6-9692-76b42e358278 to /host/opt/cni/bin/\\\\n2025-09-30T09:49:41Z [verbose] multus-daemon started\\\\n2025-09-30T09:49:41Z [verbose] Readiness Indicator file check\\\\n2025-09-30T09:50:26Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:50:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-txcfr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-t2frc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:27Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:28 crc kubenswrapper[4730]: I0930 09:50:28.005931 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-p4xvk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9871bed2-69f9-44f1-ab80-f8b4b9241e73\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19b882d14f4eaee6c6ad8cdd30c6e1e63eb837c7bc2dd31974c37ffbc3307186\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84aa64a838e41f5db6010538528a9d8d9221b5fe859e1afec9d9d3c71ef90eb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://84aa64a838e41f5db6010538528a9d8d9221b5fe859e1afec9d9d3c71ef90eb7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://618950d5ff8ec1ef34dc0f43e73a265aefbeb97f69a25226fb2007b4d28249a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://618950d5ff8ec1ef34dc0f43e73a265aefbeb97f69a25226fb2007b4d28249a7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://849eb24b3a7e62a7667c303069953f1391cdebf8f2888dc7d3967667c31c2a3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://849eb24b3a7e62a7667c303069953f1391cdebf8f2888dc7d3967667c31c2a3e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://685162f0c76d93c0aa78dc2c5c0ee794627f665afa60af9d9d0ae3ba8f5f7b59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://685162f0c76d93c0aa78dc2c5c0ee794627f665afa60af9d9d0ae3ba8f5f7b59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c92fbb212b1ad8be488e639f7f88263b3d96ab0320da532d908b3c9df59ef2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c92fbb212b1ad8be488e639f7f88263b3d96ab0320da532d908b3c9df59ef2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://840f2a769ccdfba8933c685d07c325cf84d38bccf1d317c3b664f298aa52d878\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://840f2a769ccdfba8933c685d07c325cf84d38bccf1d317c3b664f298aa52d878\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-p4xvk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:28Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:28 crc kubenswrapper[4730]: I0930 09:50:28.018818 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d4482712-8a6c-4e35-8e85-0777588ab827\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d84dba8205ee9f4469b0277711b38ff82a1068b3f1cd951ea4a4eefab51ba88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://58b48296b6764c18fc1b5163c1ef503b124b97f11f287fbbbc5b1144ecd06bbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://461de41b45c19fbaf8e606cd30458759972ca804bac7e4cd5495efd8f30280fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0abf086d2b6dabfe2b74ed6b31b94bbd61222698aeb420775118dcca92243bbd\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0abf086d2b6dabfe2b74ed6b31b94bbd61222698aeb420775118dcca92243bbd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:17Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:16Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:28Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:28 crc kubenswrapper[4730]: I0930 09:50:28.021085 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:28 crc kubenswrapper[4730]: I0930 09:50:28.021157 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:28 crc kubenswrapper[4730]: I0930 09:50:28.021171 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:28 crc kubenswrapper[4730]: I0930 09:50:28.021189 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:28 crc kubenswrapper[4730]: I0930 09:50:28.021203 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:28Z","lastTransitionTime":"2025-09-30T09:50:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:28 crc kubenswrapper[4730]: I0930 09:50:28.035458 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:28Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:28 crc kubenswrapper[4730]: I0930 09:50:28.048568 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://96ab57c0ead54e1fe3bbf9a7fe3fc8375d1e5ebd29840f01bb2022de2f3bfdc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://373aa4d8084c958728b4a3f16056068fb328a59e55a5f50785647d78fac0e424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:28Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:28 crc kubenswrapper[4730]: I0930 09:50:28.123851 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:28 crc kubenswrapper[4730]: I0930 09:50:28.123891 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:28 crc kubenswrapper[4730]: I0930 09:50:28.123922 4730 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Sep 30 09:50:28 crc kubenswrapper[4730]: I0930 09:50:28.123939 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:28 crc kubenswrapper[4730]: I0930 09:50:28.123950 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:28Z","lastTransitionTime":"2025-09-30T09:50:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:28 crc kubenswrapper[4730]: I0930 09:50:28.226679 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:28 crc kubenswrapper[4730]: I0930 09:50:28.226714 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:28 crc kubenswrapper[4730]: I0930 09:50:28.226722 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:28 crc kubenswrapper[4730]: I0930 09:50:28.226739 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:28 crc kubenswrapper[4730]: I0930 09:50:28.226750 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:28Z","lastTransitionTime":"2025-09-30T09:50:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:28 crc kubenswrapper[4730]: I0930 09:50:28.329090 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:28 crc kubenswrapper[4730]: I0930 09:50:28.329129 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:28 crc kubenswrapper[4730]: I0930 09:50:28.329141 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:28 crc kubenswrapper[4730]: I0930 09:50:28.329158 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:28 crc kubenswrapper[4730]: I0930 09:50:28.329170 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:28Z","lastTransitionTime":"2025-09-30T09:50:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:28 crc kubenswrapper[4730]: I0930 09:50:28.383080 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 09:50:28 crc kubenswrapper[4730]: E0930 09:50:28.383231 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 09:50:28 crc kubenswrapper[4730]: I0930 09:50:28.383472 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 09:50:28 crc kubenswrapper[4730]: E0930 09:50:28.383765 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 09:50:28 crc kubenswrapper[4730]: I0930 09:50:28.383903 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 09:50:28 crc kubenswrapper[4730]: E0930 09:50:28.383968 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 09:50:28 crc kubenswrapper[4730]: I0930 09:50:28.431372 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:28 crc kubenswrapper[4730]: I0930 09:50:28.431416 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:28 crc kubenswrapper[4730]: I0930 09:50:28.431426 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:28 crc kubenswrapper[4730]: I0930 09:50:28.431444 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:28 crc kubenswrapper[4730]: I0930 09:50:28.431454 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:28Z","lastTransitionTime":"2025-09-30T09:50:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:28 crc kubenswrapper[4730]: I0930 09:50:28.534376 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:28 crc kubenswrapper[4730]: I0930 09:50:28.534413 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:28 crc kubenswrapper[4730]: I0930 09:50:28.534425 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:28 crc kubenswrapper[4730]: I0930 09:50:28.534442 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:28 crc kubenswrapper[4730]: I0930 09:50:28.534453 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:28Z","lastTransitionTime":"2025-09-30T09:50:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:28 crc kubenswrapper[4730]: I0930 09:50:28.637627 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:28 crc kubenswrapper[4730]: I0930 09:50:28.637670 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:28 crc kubenswrapper[4730]: I0930 09:50:28.637681 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:28 crc kubenswrapper[4730]: I0930 09:50:28.637695 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:28 crc kubenswrapper[4730]: I0930 09:50:28.637707 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:28Z","lastTransitionTime":"2025-09-30T09:50:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:28 crc kubenswrapper[4730]: I0930 09:50:28.742256 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:28 crc kubenswrapper[4730]: I0930 09:50:28.742321 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:28 crc kubenswrapper[4730]: I0930 09:50:28.742333 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:28 crc kubenswrapper[4730]: I0930 09:50:28.742371 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:28 crc kubenswrapper[4730]: I0930 09:50:28.742382 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:28Z","lastTransitionTime":"2025-09-30T09:50:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:28 crc kubenswrapper[4730]: I0930 09:50:28.845671 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:28 crc kubenswrapper[4730]: I0930 09:50:28.845742 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:28 crc kubenswrapper[4730]: I0930 09:50:28.845761 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:28 crc kubenswrapper[4730]: I0930 09:50:28.845783 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:28 crc kubenswrapper[4730]: I0930 09:50:28.845801 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:28Z","lastTransitionTime":"2025-09-30T09:50:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:28 crc kubenswrapper[4730]: I0930 09:50:28.948849 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:28 crc kubenswrapper[4730]: I0930 09:50:28.948904 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:28 crc kubenswrapper[4730]: I0930 09:50:28.948919 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:28 crc kubenswrapper[4730]: I0930 09:50:28.948941 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:28 crc kubenswrapper[4730]: I0930 09:50:28.948953 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:28Z","lastTransitionTime":"2025-09-30T09:50:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:29 crc kubenswrapper[4730]: I0930 09:50:29.051174 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:29 crc kubenswrapper[4730]: I0930 09:50:29.051224 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:29 crc kubenswrapper[4730]: I0930 09:50:29.051237 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:29 crc kubenswrapper[4730]: I0930 09:50:29.051254 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:29 crc kubenswrapper[4730]: I0930 09:50:29.051264 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:29Z","lastTransitionTime":"2025-09-30T09:50:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:29 crc kubenswrapper[4730]: I0930 09:50:29.153943 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:29 crc kubenswrapper[4730]: I0930 09:50:29.153993 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:29 crc kubenswrapper[4730]: I0930 09:50:29.154004 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:29 crc kubenswrapper[4730]: I0930 09:50:29.154023 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:29 crc kubenswrapper[4730]: I0930 09:50:29.154035 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:29Z","lastTransitionTime":"2025-09-30T09:50:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:29 crc kubenswrapper[4730]: I0930 09:50:29.256179 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:29 crc kubenswrapper[4730]: I0930 09:50:29.256243 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:29 crc kubenswrapper[4730]: I0930 09:50:29.256255 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:29 crc kubenswrapper[4730]: I0930 09:50:29.256272 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:29 crc kubenswrapper[4730]: I0930 09:50:29.256287 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:29Z","lastTransitionTime":"2025-09-30T09:50:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:29 crc kubenswrapper[4730]: I0930 09:50:29.359806 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:29 crc kubenswrapper[4730]: I0930 09:50:29.359862 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:29 crc kubenswrapper[4730]: I0930 09:50:29.359877 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:29 crc kubenswrapper[4730]: I0930 09:50:29.359895 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:29 crc kubenswrapper[4730]: I0930 09:50:29.359908 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:29Z","lastTransitionTime":"2025-09-30T09:50:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:29 crc kubenswrapper[4730]: I0930 09:50:29.380157 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-dqqrb" Sep 30 09:50:29 crc kubenswrapper[4730]: E0930 09:50:29.380345 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-dqqrb" podUID="be86a67e-c663-4551-9ecf-a8c2a9801cd7" Sep 30 09:50:29 crc kubenswrapper[4730]: I0930 09:50:29.462996 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:29 crc kubenswrapper[4730]: I0930 09:50:29.463036 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:29 crc kubenswrapper[4730]: I0930 09:50:29.463045 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:29 crc kubenswrapper[4730]: I0930 09:50:29.463059 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:29 crc kubenswrapper[4730]: I0930 09:50:29.463069 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:29Z","lastTransitionTime":"2025-09-30T09:50:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:29 crc kubenswrapper[4730]: I0930 09:50:29.565467 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:29 crc kubenswrapper[4730]: I0930 09:50:29.565504 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:29 crc kubenswrapper[4730]: I0930 09:50:29.565513 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:29 crc kubenswrapper[4730]: I0930 09:50:29.565527 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:29 crc kubenswrapper[4730]: I0930 09:50:29.565537 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:29Z","lastTransitionTime":"2025-09-30T09:50:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:29 crc kubenswrapper[4730]: I0930 09:50:29.668261 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:29 crc kubenswrapper[4730]: I0930 09:50:29.668345 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:29 crc kubenswrapper[4730]: I0930 09:50:29.668360 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:29 crc kubenswrapper[4730]: I0930 09:50:29.668386 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:29 crc kubenswrapper[4730]: I0930 09:50:29.668402 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:29Z","lastTransitionTime":"2025-09-30T09:50:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:29 crc kubenswrapper[4730]: I0930 09:50:29.771293 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:29 crc kubenswrapper[4730]: I0930 09:50:29.771350 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:29 crc kubenswrapper[4730]: I0930 09:50:29.771366 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:29 crc kubenswrapper[4730]: I0930 09:50:29.771386 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:29 crc kubenswrapper[4730]: I0930 09:50:29.771400 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:29Z","lastTransitionTime":"2025-09-30T09:50:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:29 crc kubenswrapper[4730]: I0930 09:50:29.874972 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:29 crc kubenswrapper[4730]: I0930 09:50:29.875013 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:29 crc kubenswrapper[4730]: I0930 09:50:29.875023 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:29 crc kubenswrapper[4730]: I0930 09:50:29.875037 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:29 crc kubenswrapper[4730]: I0930 09:50:29.875046 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:29Z","lastTransitionTime":"2025-09-30T09:50:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:29 crc kubenswrapper[4730]: I0930 09:50:29.977745 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:29 crc kubenswrapper[4730]: I0930 09:50:29.978269 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:29 crc kubenswrapper[4730]: I0930 09:50:29.978470 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:29 crc kubenswrapper[4730]: I0930 09:50:29.978550 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:29 crc kubenswrapper[4730]: I0930 09:50:29.978621 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:29Z","lastTransitionTime":"2025-09-30T09:50:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:30 crc kubenswrapper[4730]: I0930 09:50:30.081492 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:30 crc kubenswrapper[4730]: I0930 09:50:30.081532 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:30 crc kubenswrapper[4730]: I0930 09:50:30.081543 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:30 crc kubenswrapper[4730]: I0930 09:50:30.081562 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:30 crc kubenswrapper[4730]: I0930 09:50:30.081574 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:30Z","lastTransitionTime":"2025-09-30T09:50:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:30 crc kubenswrapper[4730]: I0930 09:50:30.183732 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:30 crc kubenswrapper[4730]: I0930 09:50:30.183821 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:30 crc kubenswrapper[4730]: I0930 09:50:30.183833 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:30 crc kubenswrapper[4730]: I0930 09:50:30.183849 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:30 crc kubenswrapper[4730]: I0930 09:50:30.183859 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:30Z","lastTransitionTime":"2025-09-30T09:50:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:30 crc kubenswrapper[4730]: I0930 09:50:30.286570 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:30 crc kubenswrapper[4730]: I0930 09:50:30.286865 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:30 crc kubenswrapper[4730]: I0930 09:50:30.286983 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:30 crc kubenswrapper[4730]: I0930 09:50:30.287078 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:30 crc kubenswrapper[4730]: I0930 09:50:30.287206 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:30Z","lastTransitionTime":"2025-09-30T09:50:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:30 crc kubenswrapper[4730]: I0930 09:50:30.380426 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 09:50:30 crc kubenswrapper[4730]: E0930 09:50:30.380564 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 09:50:30 crc kubenswrapper[4730]: I0930 09:50:30.380802 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 09:50:30 crc kubenswrapper[4730]: I0930 09:50:30.380817 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 09:50:30 crc kubenswrapper[4730]: E0930 09:50:30.380853 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 09:50:30 crc kubenswrapper[4730]: E0930 09:50:30.381003 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 09:50:30 crc kubenswrapper[4730]: I0930 09:50:30.389070 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:30 crc kubenswrapper[4730]: I0930 09:50:30.389110 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:30 crc kubenswrapper[4730]: I0930 09:50:30.389122 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:30 crc kubenswrapper[4730]: I0930 09:50:30.389137 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:30 crc kubenswrapper[4730]: I0930 09:50:30.389150 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:30Z","lastTransitionTime":"2025-09-30T09:50:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:30 crc kubenswrapper[4730]: I0930 09:50:30.490895 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:30 crc kubenswrapper[4730]: I0930 09:50:30.491178 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:30 crc kubenswrapper[4730]: I0930 09:50:30.491248 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:30 crc kubenswrapper[4730]: I0930 09:50:30.491343 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:30 crc kubenswrapper[4730]: I0930 09:50:30.491439 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:30Z","lastTransitionTime":"2025-09-30T09:50:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:30 crc kubenswrapper[4730]: I0930 09:50:30.594113 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:30 crc kubenswrapper[4730]: I0930 09:50:30.594452 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:30 crc kubenswrapper[4730]: I0930 09:50:30.594580 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:30 crc kubenswrapper[4730]: I0930 09:50:30.594738 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:30 crc kubenswrapper[4730]: I0930 09:50:30.594859 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:30Z","lastTransitionTime":"2025-09-30T09:50:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:30 crc kubenswrapper[4730]: I0930 09:50:30.698056 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:30 crc kubenswrapper[4730]: I0930 09:50:30.698089 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:30 crc kubenswrapper[4730]: I0930 09:50:30.698097 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:30 crc kubenswrapper[4730]: I0930 09:50:30.698127 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:30 crc kubenswrapper[4730]: I0930 09:50:30.698137 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:30Z","lastTransitionTime":"2025-09-30T09:50:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:30 crc kubenswrapper[4730]: I0930 09:50:30.801787 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:30 crc kubenswrapper[4730]: I0930 09:50:30.802414 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:30 crc kubenswrapper[4730]: I0930 09:50:30.802689 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:30 crc kubenswrapper[4730]: I0930 09:50:30.802866 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:30 crc kubenswrapper[4730]: I0930 09:50:30.802995 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:30Z","lastTransitionTime":"2025-09-30T09:50:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:30 crc kubenswrapper[4730]: I0930 09:50:30.906276 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:30 crc kubenswrapper[4730]: I0930 09:50:30.906554 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:30 crc kubenswrapper[4730]: I0930 09:50:30.906669 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:30 crc kubenswrapper[4730]: I0930 09:50:30.906755 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:30 crc kubenswrapper[4730]: I0930 09:50:30.906870 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:30Z","lastTransitionTime":"2025-09-30T09:50:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:31 crc kubenswrapper[4730]: I0930 09:50:31.009494 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:31 crc kubenswrapper[4730]: I0930 09:50:31.010181 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:31 crc kubenswrapper[4730]: I0930 09:50:31.010286 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:31 crc kubenswrapper[4730]: I0930 09:50:31.010464 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:31 crc kubenswrapper[4730]: I0930 09:50:31.010596 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:31Z","lastTransitionTime":"2025-09-30T09:50:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:31 crc kubenswrapper[4730]: I0930 09:50:31.113390 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:31 crc kubenswrapper[4730]: I0930 09:50:31.113824 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:31 crc kubenswrapper[4730]: I0930 09:50:31.113930 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:31 crc kubenswrapper[4730]: I0930 09:50:31.114028 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:31 crc kubenswrapper[4730]: I0930 09:50:31.114119 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:31Z","lastTransitionTime":"2025-09-30T09:50:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:31 crc kubenswrapper[4730]: I0930 09:50:31.217199 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:31 crc kubenswrapper[4730]: I0930 09:50:31.217253 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:31 crc kubenswrapper[4730]: I0930 09:50:31.217265 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:31 crc kubenswrapper[4730]: I0930 09:50:31.217284 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:31 crc kubenswrapper[4730]: I0930 09:50:31.217299 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:31Z","lastTransitionTime":"2025-09-30T09:50:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:31 crc kubenswrapper[4730]: I0930 09:50:31.320052 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:31 crc kubenswrapper[4730]: I0930 09:50:31.320113 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:31 crc kubenswrapper[4730]: I0930 09:50:31.320125 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:31 crc kubenswrapper[4730]: I0930 09:50:31.320143 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:31 crc kubenswrapper[4730]: I0930 09:50:31.320154 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:31Z","lastTransitionTime":"2025-09-30T09:50:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:31 crc kubenswrapper[4730]: I0930 09:50:31.380650 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-dqqrb" Sep 30 09:50:31 crc kubenswrapper[4730]: E0930 09:50:31.380814 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-dqqrb" podUID="be86a67e-c663-4551-9ecf-a8c2a9801cd7" Sep 30 09:50:31 crc kubenswrapper[4730]: I0930 09:50:31.423088 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:31 crc kubenswrapper[4730]: I0930 09:50:31.423128 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:31 crc kubenswrapper[4730]: I0930 09:50:31.423140 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:31 crc kubenswrapper[4730]: I0930 09:50:31.423162 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:31 crc kubenswrapper[4730]: I0930 09:50:31.423176 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:31Z","lastTransitionTime":"2025-09-30T09:50:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:31 crc kubenswrapper[4730]: I0930 09:50:31.525780 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:31 crc kubenswrapper[4730]: I0930 09:50:31.525852 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:31 crc kubenswrapper[4730]: I0930 09:50:31.525869 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:31 crc kubenswrapper[4730]: I0930 09:50:31.525889 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:31 crc kubenswrapper[4730]: I0930 09:50:31.525905 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:31Z","lastTransitionTime":"2025-09-30T09:50:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:31 crc kubenswrapper[4730]: I0930 09:50:31.628715 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:31 crc kubenswrapper[4730]: I0930 09:50:31.628758 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:31 crc kubenswrapper[4730]: I0930 09:50:31.628771 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:31 crc kubenswrapper[4730]: I0930 09:50:31.628788 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:31 crc kubenswrapper[4730]: I0930 09:50:31.628801 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:31Z","lastTransitionTime":"2025-09-30T09:50:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:31 crc kubenswrapper[4730]: I0930 09:50:31.731770 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:31 crc kubenswrapper[4730]: I0930 09:50:31.731837 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:31 crc kubenswrapper[4730]: I0930 09:50:31.731857 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:31 crc kubenswrapper[4730]: I0930 09:50:31.731895 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:31 crc kubenswrapper[4730]: I0930 09:50:31.731908 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:31Z","lastTransitionTime":"2025-09-30T09:50:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:31 crc kubenswrapper[4730]: I0930 09:50:31.834118 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:31 crc kubenswrapper[4730]: I0930 09:50:31.834174 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:31 crc kubenswrapper[4730]: I0930 09:50:31.834188 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:31 crc kubenswrapper[4730]: I0930 09:50:31.834210 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:31 crc kubenswrapper[4730]: I0930 09:50:31.834224 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:31Z","lastTransitionTime":"2025-09-30T09:50:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Sep 30 09:50:32 crc kubenswrapper[4730]: I0930 09:50:32.380249 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 30 09:50:32 crc kubenswrapper[4730]: I0930 09:50:32.380323 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 30 09:50:32 crc kubenswrapper[4730]: E0930 09:50:32.380444 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 30 09:50:32 crc kubenswrapper[4730]: I0930 09:50:32.380494 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 30 09:50:32 crc kubenswrapper[4730]: E0930 09:50:32.380710 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 30 09:50:32 crc kubenswrapper[4730]: E0930 09:50:32.380817 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 30 09:50:32 crc kubenswrapper[4730]: I0930 09:50:32.394211 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"]
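These "Error syncing pod, skipping" records are the per-pod consequence of the NetworkReady=false condition above: sandbox creation is deferred for pods that need the cluster network, while host-network pods can still be admitted. A simplified Go sketch of that gate (illustrative only, not the kubelet's actual code path; the error text is the one logged above):

package main

import (
	"errors"
	"fmt"
)

// errNetworkNotReady reproduces the err= value from pod_workers.go:1301 above.
var errNetworkNotReady = errors.New("network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?")

// canSyncPod is an illustrative stand-in for the kubelet's check: a pod that
// needs the cluster network is skipped until the CNI reports ready, while a
// host-network pod (for example a static control-plane pod) may proceed.
func canSyncPod(networkReady, hostNetwork bool) error {
	if !networkReady && !hostNetwork {
		return errNetworkNotReady
	}
	return nil
}

func main() {
	if err := canSyncPod(false, false); err != nil {
		fmt.Println("Error syncing pod, skipping:", err)
	}
	if err := canSyncPod(false, true); err == nil {
		fmt.Println("host-network pod proceeds despite NetworkReady=false")
	}
}

This is consistent with the SyncLoop ADD for kube-rbac-proxy-crio-crc proceeding while the three cluster-network pods above keep failing (assuming, as is typical for such static pods, that it runs on the host network).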
[... the five-record not-ready cycle repeats roughly every 100 ms from 09:50:32.453157 through 09:50:33.378879; only the timestamps differ ...]
Sep 30 09:50:33 crc kubenswrapper[4730]: I0930 09:50:33.380755 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-dqqrb"
Sep 30 09:50:33 crc kubenswrapper[4730]: E0930 09:50:33.380930 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-dqqrb" podUID="be86a67e-c663-4551-9ecf-a8c2a9801cd7"
[... the five-record not-ready cycle repeats roughly every 100 ms from 09:50:33.481825 through 09:50:33.968965; only the timestamps differ ...]
Sep 30 09:50:33 crc kubenswrapper[4730]: E0930 09:50:33.984563 4730 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:50:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:50:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:33Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:50:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:50:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:33Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"106859cf-ef10-430a-91cd-145c67df2de1\\\",\\\"systemUUID\\\":\\\"07b44d08-082f-49ea-b265-a8fb7a484875\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:33Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:33 crc kubenswrapper[4730]: I0930 09:50:33.989673 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:33 crc kubenswrapper[4730]: I0930 09:50:33.989953 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
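Every status-patch attempt from here on dies on the same root cause, visible at the end of the record above: the node.network-node-identity.openshift.io webhook at https://127.0.0.1:9743 presents a serving certificate that expired on 2025-08-24T17:21:41Z. A small self-contained Go probe to read the presented certificate's validity window (the address is taken from the log; verification is skipped deliberately so an already-expired chain can still be inspected):

package main

import (
	"crypto/tls"
	"fmt"
	"time"
)

func main() {
	// Endpoint from the failing webhook URL in the log:
	// https://127.0.0.1:9743/node?timeout=10s
	conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{
		// Skip chain verification on purpose: normal verification would
		// fail with the same x509 "certificate has expired" error.
		InsecureSkipVerify: true,
	})
	if err != nil {
		panic(err)
	}
	defer conn.Close()

	cert := conn.ConnectionState().PeerCertificates[0]
	fmt.Printf("subject=%s\nnotBefore=%s\nnotAfter=%s\n",
		cert.Subject,
		cert.NotBefore.Format(time.RFC3339),
		cert.NotAfter.Format(time.RFC3339))
	if time.Now().After(cert.NotAfter) {
		fmt.Println("certificate has expired, matching the x509 error in the log")
	}
}

Run against this node, the probe should report notAfter=2025-08-24T17:21:41Z, more than a month before the log's current time of 2025-09-30T09:50:33Z.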
event="NodeHasNoDiskPressure" Sep 30 09:50:33 crc kubenswrapper[4730]: I0930 09:50:33.990259 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:33 crc kubenswrapper[4730]: I0930 09:50:33.990468 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:33 crc kubenswrapper[4730]: I0930 09:50:33.990758 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:33Z","lastTransitionTime":"2025-09-30T09:50:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:34 crc kubenswrapper[4730]: E0930 09:50:34.006459 4730 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:50:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:50:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:33Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:50:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:50:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:33Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"106859cf-ef10-430a-91cd-145c67df2de1\\\",\\\"systemUUID\\\":\\\"07b44d08-082f-49ea-b265-a8fb7a484875\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:34Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:34 crc kubenswrapper[4730]: I0930 09:50:34.011883 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:34 crc kubenswrapper[4730]: I0930 09:50:34.011936 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
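The kubelet does not give up after one failed PATCH: each node-status sync retries a bounded number of times, which is why the same webhook failure recurs at 09:50:33.984563, 09:50:34.006459 and again below. A schematic Go version of that loop; the constant follows the upstream kubelet's nodeStatusUpdateRetry, whose exact value (5) should be treated as an assumption here:

package main

import (
	"errors"
	"fmt"
)

// nodeStatusUpdateRetry is modeled on the upstream kubelet constant;
// the value is an assumption for illustration.
const nodeStatusUpdateRetry = 5

// updateNodeStatus retries tryUpdate a fixed number of times, logging each
// failure the way kubelet_node_status.go:585 does above.
func updateNodeStatus(tryUpdate func() error) error {
	for i := 0; i < nodeStatusUpdateRetry; i++ {
		if err := tryUpdate(); err != nil {
			fmt.Printf("Error updating node status, will retry: %v\n", err)
			continue
		}
		return nil
	}
	return errors.New("update node status exceeds retry count")
}

func main() {
	webhookErr := errors.New(`failed calling webhook "node.network-node-identity.openshift.io": tls: failed to verify certificate: x509: certificate has expired or is not yet valid`)
	if err := updateNodeStatus(func() error { return webhookErr }); err != nil {
		fmt.Println("Unable to update node status:", err)
	}
}

Since the webhook failure is deterministic here, every retry in this log fails the same way; only the condition timestamps inside the patch advance.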
event="NodeHasNoDiskPressure" Sep 30 09:50:34 crc kubenswrapper[4730]: I0930 09:50:34.011950 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:34 crc kubenswrapper[4730]: I0930 09:50:34.011968 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:34 crc kubenswrapper[4730]: I0930 09:50:34.011981 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:34Z","lastTransitionTime":"2025-09-30T09:50:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:34 crc kubenswrapper[4730]: E0930 09:50:34.027648 4730 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:50:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:50:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:34Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:50:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:50:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:34Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"106859cf-ef10-430a-91cd-145c67df2de1\\\",\\\"systemUUID\\\":\\\"07b44d08-082f-49ea-b265-a8fb7a484875\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:34Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:34 crc kubenswrapper[4730]: I0930 09:50:34.032724 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:34 crc kubenswrapper[4730]: I0930 09:50:34.032769 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 09:50:34 crc kubenswrapper[4730]: I0930 09:50:34.032780 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:34 crc kubenswrapper[4730]: I0930 09:50:34.032805 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:34 crc kubenswrapper[4730]: I0930 09:50:34.032817 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:34Z","lastTransitionTime":"2025-09-30T09:50:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:34 crc kubenswrapper[4730]: E0930 09:50:34.047370 4730 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:50:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:50:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:34Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:50:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:50:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:34Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"106859cf-ef10-430a-91cd-145c67df2de1\\\",\\\"systemUUID\\\":\\\"07b44d08-082f-49ea-b265-a8fb7a484875\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:34Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:34 crc kubenswrapper[4730]: I0930 09:50:34.052224 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:34 crc kubenswrapper[4730]: I0930 09:50:34.052273 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 09:50:34 crc kubenswrapper[4730]: I0930 09:50:34.052288 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:34 crc kubenswrapper[4730]: I0930 09:50:34.052308 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:34 crc kubenswrapper[4730]: I0930 09:50:34.052322 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:34Z","lastTransitionTime":"2025-09-30T09:50:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:34 crc kubenswrapper[4730]: E0930 09:50:34.065139 4730 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:50:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:50:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:34Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:50:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:50:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:34Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"106859cf-ef10-430a-91cd-145c67df2de1\\\",\\\"systemUUID\\\":\\\"07b44d08-082f-49ea-b265-a8fb7a484875\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:34Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:34 crc kubenswrapper[4730]: E0930 09:50:34.065262 4730 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Sep 30 09:50:34 crc kubenswrapper[4730]: I0930 09:50:34.067170 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Sep 30 09:50:34 crc kubenswrapper[4730]: I0930 09:50:34.067215 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:34 crc kubenswrapper[4730]: I0930 09:50:34.067226 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:34 crc kubenswrapper[4730]: I0930 09:50:34.067246 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:34 crc kubenswrapper[4730]: I0930 09:50:34.067256 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:34Z","lastTransitionTime":"2025-09-30T09:50:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:34 crc kubenswrapper[4730]: I0930 09:50:34.174175 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:34 crc kubenswrapper[4730]: I0930 09:50:34.174337 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:34 crc kubenswrapper[4730]: I0930 09:50:34.174368 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:34 crc kubenswrapper[4730]: I0930 09:50:34.174585 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:34 crc kubenswrapper[4730]: I0930 09:50:34.174686 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:34Z","lastTransitionTime":"2025-09-30T09:50:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:34 crc kubenswrapper[4730]: I0930 09:50:34.278299 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:34 crc kubenswrapper[4730]: I0930 09:50:34.278433 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:34 crc kubenswrapper[4730]: I0930 09:50:34.278447 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:34 crc kubenswrapper[4730]: I0930 09:50:34.278466 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:34 crc kubenswrapper[4730]: I0930 09:50:34.278479 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:34Z","lastTransitionTime":"2025-09-30T09:50:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:34 crc kubenswrapper[4730]: I0930 09:50:34.380065 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 09:50:34 crc kubenswrapper[4730]: I0930 09:50:34.380124 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 09:50:34 crc kubenswrapper[4730]: I0930 09:50:34.380167 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 09:50:34 crc kubenswrapper[4730]: E0930 09:50:34.380240 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 09:50:34 crc kubenswrapper[4730]: E0930 09:50:34.380359 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 09:50:34 crc kubenswrapper[4730]: E0930 09:50:34.380532 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 09:50:34 crc kubenswrapper[4730]: I0930 09:50:34.381976 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:34 crc kubenswrapper[4730]: I0930 09:50:34.382034 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:34 crc kubenswrapper[4730]: I0930 09:50:34.382052 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:34 crc kubenswrapper[4730]: I0930 09:50:34.382078 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:34 crc kubenswrapper[4730]: I0930 09:50:34.382099 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:34Z","lastTransitionTime":"2025-09-30T09:50:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:34 crc kubenswrapper[4730]: I0930 09:50:34.382170 4730 scope.go:117] "RemoveContainer" containerID="920e864b3b47f25ccbf4d23e865008517a888ed00df9d5e474a1d43ceac0faf1" Sep 30 09:50:34 crc kubenswrapper[4730]: I0930 09:50:34.485276 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:34 crc kubenswrapper[4730]: I0930 09:50:34.485332 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:34 crc kubenswrapper[4730]: I0930 09:50:34.485346 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:34 crc kubenswrapper[4730]: I0930 09:50:34.485365 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:34 crc kubenswrapper[4730]: I0930 09:50:34.485380 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:34Z","lastTransitionTime":"2025-09-30T09:50:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:34 crc kubenswrapper[4730]: I0930 09:50:34.588053 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:34 crc kubenswrapper[4730]: I0930 09:50:34.588085 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:34 crc kubenswrapper[4730]: I0930 09:50:34.588093 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:34 crc kubenswrapper[4730]: I0930 09:50:34.588109 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:34 crc kubenswrapper[4730]: I0930 09:50:34.588119 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:34Z","lastTransitionTime":"2025-09-30T09:50:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:34 crc kubenswrapper[4730]: I0930 09:50:34.691400 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:34 crc kubenswrapper[4730]: I0930 09:50:34.691459 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:34 crc kubenswrapper[4730]: I0930 09:50:34.691475 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:34 crc kubenswrapper[4730]: I0930 09:50:34.691495 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:34 crc kubenswrapper[4730]: I0930 09:50:34.691510 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:34Z","lastTransitionTime":"2025-09-30T09:50:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:34 crc kubenswrapper[4730]: I0930 09:50:34.793733 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:34 crc kubenswrapper[4730]: I0930 09:50:34.793780 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:34 crc kubenswrapper[4730]: I0930 09:50:34.793791 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:34 crc kubenswrapper[4730]: I0930 09:50:34.793811 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:34 crc kubenswrapper[4730]: I0930 09:50:34.793825 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:34Z","lastTransitionTime":"2025-09-30T09:50:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:34 crc kubenswrapper[4730]: I0930 09:50:34.852305 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-c5vmh_823c4c28-801d-421e-b15f-02a17e300753/ovnkube-controller/2.log" Sep 30 09:50:34 crc kubenswrapper[4730]: I0930 09:50:34.855430 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" event={"ID":"823c4c28-801d-421e-b15f-02a17e300753","Type":"ContainerStarted","Data":"380a34335df704ec5f2fb810525f314739ef1efe35a54dd9e23f95b66389a669"} Sep 30 09:50:34 crc kubenswrapper[4730]: I0930 09:50:34.856106 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" Sep 30 09:50:34 crc kubenswrapper[4730]: I0930 09:50:34.868577 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s64nf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a7e6b85-ac68-4da9-b7eb-b5a936f639df\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a77da47c8d44b3aae7f4e27fcc4df190549c2d881e0969dbe3b8951103477ba3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6rknv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s64nf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:34Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:34 crc kubenswrapper[4730]: I0930 09:50:34.881944 4730 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:34Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:34 crc kubenswrapper[4730]: I0930 09:50:34.896451 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-t2frc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"98a6f8df-1ac8-4652-8074-90cb180311ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ed00cdb0dca4ffa70594334507af7834b99fd93be39a245a3b569f39154c2a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca62256374b9da6cb7b82db90ee6d121b072440c000bc7dfb04e763224cf666d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T09:50:26Z\\\",\\\"message\\\":\\\"2025-09-30T09:49:40+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_9e508620-01ca-40f6-9692-76b42e358278\\\\n2025-09-30T09:49:40+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_9e508620-01ca-40f6-9692-76b42e358278 to /host/opt/cni/bin/\\\\n2025-09-30T09:49:41Z [verbose] multus-daemon started\\\\n2025-09-30T09:49:41Z [verbose] Readiness Indicator file check\\\\n2025-09-30T09:50:26Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:50:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-txcfr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-t2frc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:34Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:34 crc kubenswrapper[4730]: I0930 09:50:34.897009 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:34 crc kubenswrapper[4730]: I0930 09:50:34.897048 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:34 crc kubenswrapper[4730]: I0930 09:50:34.897057 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:34 crc kubenswrapper[4730]: I0930 09:50:34.897073 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:34 crc kubenswrapper[4730]: I0930 09:50:34.897082 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:34Z","lastTransitionTime":"2025-09-30T09:50:34Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:34 crc kubenswrapper[4730]: I0930 09:50:34.920101 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"823c4c28-801d-421e-b15f-02a17e300753\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://566f950ac85f82874c332e01386898784e3b3e37ee3c422f8b73f6e732cd4541\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f988662b16857e74e7c88eb9e69279e7b5807aa2831cdd6ed9a7cab0a6b2494\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/s
ecrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d99f5927ded358669fa88d47fb6a8d8bad2808bfdd07920056227828e478eda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fbe6a18bf72a902960dfc551de099c3956696ca5ac78ff5afc30a93768be323\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd7abb6daba8cf38e3d8cc62ab526acb2fe714e07477eb5d1ecf77e145cf60dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d525f8baccabf7bc3b8067dbbc59d76851878ccc48a9fbe61c637b6fe57e01d\\
\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://380a34335df704ec5f2fb810525f314739ef1efe35a54dd9e23f95b66389a669\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://920e864b3b47f25ccbf4d23e865008517a888ed00df9d5e474a1d43ceac0faf1\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T09:50:07Z\\\",\\\"message\\\":\\\"/multus-additional-cni-plugins-p4xvk\\\\nF0930 09:50:07.486769 6349 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:07Z is after 2025-08-24T17:21:41Z]\\\\nI0930 09:50:07.486778 6349 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-console/networking-console-plugin-85b44fc459-gdk6g\\\\nI0930 09:50:07.486723 6349 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-machine-config-operator/machine-config-operator]} name:Service_openshift-machine-config-operator/machine-config-operator_TCP_cluster 
o\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T09:50:06Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:50:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59538b49ec4037e4bd1ccb035a0263f32db2504037aca2ab484dc9323f80b5d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"cont
ainerID\\\":\\\"cri-o://e4fcd3f6fd645e486e741ae4ddea7a8669c849d729860c651131da25638a393f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4fcd3f6fd645e486e741ae4ddea7a8669c849d729860c651131da25638a393f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-c5vmh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:34Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:34 crc kubenswrapper[4730]: I0930 09:50:34.935160 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-dqqrb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be86a67e-c663-4551-9ecf-a8c2a9801cd7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9sm8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9sm8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:51Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-dqqrb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:34Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:34 crc kubenswrapper[4730]: I0930 09:50:34.953062 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:34Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:34 crc kubenswrapper[4730]: I0930 09:50:34.969547 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:34Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:34 crc kubenswrapper[4730]: I0930 09:50:34.982450 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://96ab57c0ead54e1fe3bbf9a7fe3fc8375d1e5ebd29840f01bb2022de2f3bfdc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://373aa4d8084c958728b4a3f16056068fb328a59e55a5f50785647d78fac0e424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:34Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:34 crc kubenswrapper[4730]: I0930 09:50:34.997583 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-p4xvk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9871bed2-69f9-44f1-ab80-f8b4b9241e73\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19b882d14f4eaee6c6ad8cdd30c6e1e63eb837c7bc2dd31974c37ffbc3307186\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84aa64a838e41f5db6010538528a9d8d9221b5fe859e1afec9d9d3c71ef90eb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://84aa64a838e41f5db6010538528a9d8d9221b5fe859e1afec9d9d3c71ef90eb7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"start
edAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://618950d5ff8ec1ef34dc0f43e73a265aefbeb97f69a25226fb2007b4d28249a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://618950d5ff8ec1ef34dc0f43e73a265aefbeb97f69a25226fb2007b4d28249a7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://849eb24b3a7e62a7667c303069953f1391cdebf8f2888dc7d3967667c31c2a3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://849eb24b3a7e62a7667c303069953f1391cdebf8f2888dc7d3967667c31c2a3e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://685162f0c76d93c0aa78dc2c5c0ee794627f665afa60af9d9d0ae3ba8f5f7b59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\"
:\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://685162f0c76d93c0aa78dc2c5c0ee794627f665afa60af9d9d0ae3ba8f5f7b59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c92fbb212b1ad8be488e639f7f88263b3d96ab0320da532d908b3c9df59ef2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c92fbb212b1ad8be488e639f7f88263b3d96ab0320da532d908b3c9df59ef2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://840f2a769ccdfba8933c685d07c325cf84d38bccf1d317c3b664f298aa52d878\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://840f2a769ccdfba8933c685d07c325cf84d38bccf1d317c3b664f298aa52d878\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.1
68.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-p4xvk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:34Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:34 crc kubenswrapper[4730]: I0930 09:50:34.999419 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:34 crc kubenswrapper[4730]: I0930 09:50:34.999480 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:34 crc kubenswrapper[4730]: I0930 09:50:34.999499 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:34 crc kubenswrapper[4730]: I0930 09:50:34.999521 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:34 crc kubenswrapper[4730]: I0930 09:50:34.999536 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:34Z","lastTransitionTime":"2025-09-30T09:50:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:35 crc kubenswrapper[4730]: I0930 09:50:35.008603 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a3f577f-035f-49c0-bc29-3b7849a2214f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04522c3f227c31fd9dc48f62caeaa83a5e3c3d9ef7a60e33e6a20f41ecafdf5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1bb754504953177858ef8742f7c3889393c8478b60edfaed19f1944bf2b6e0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a1bb754504953177858ef8742f7c3889393c8478b60edfaed19f1944bf2b6e0f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:35Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:35 crc kubenswrapper[4730]: I0930 09:50:35.023320 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d4482712-8a6c-4e35-8e85-0777588ab827\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d84dba8205ee9f4469b0277711b38ff82a1068b3f1cd951ea4a4eefab51ba88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://58b48296b6764c18fc1b5163c1ef503b124b97f11f287fbbbc5b1144ecd06bbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://461de41b45c19fbaf8e606cd30458759972ca804bac7e4cd5495efd8f30280fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0abf086d2b6dabfe2b74ed6b31b94bbd61222698aeb420775118dcca92243bbd\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0abf086d2b6dabfe2b74ed6b31b94bbd61222698aeb420775118dcca92243bbd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:17Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:16Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:35Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:35 crc kubenswrapper[4730]: I0930 09:50:35.039112 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1917c40c0ff4b4b6a2389c465a4887bad0db0b5b86a8715e0c790b3e0acfb973\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:35Z is after 
2025-08-24T17:21:41Z" Sep 30 09:50:35 crc kubenswrapper[4730]: I0930 09:50:35.051977 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e32477c6ee071d9e994825da575711cc75f177c998dd5fd170be07551b40e049\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:35Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:35 crc kubenswrapper[4730]: I0930 09:50:35.064795 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"95bd4436-8399-478d-9552-c9ba5ae8f327\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b7761b460113a85528a11a7475d5245ce3009a0dd413a8deec822c6c0eac07b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9d975\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://defac85c6bf34a3ea262b1ad293516b72b27c3f3e328f3f970694bb978bf90a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9d975\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d4zf9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:35Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:35 crc kubenswrapper[4730]: I0930 09:50:35.076058 4730 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-nw55k" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c0a99445-b60e-4c47-b4ae-00b9983d8a15\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b566e571569572866ddcfc76684ec3f1fdabc0f890aa5fd8588d136b890e88ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qdpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:40Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nw55k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:35Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:35 crc kubenswrapper[4730]: I0930 09:50:35.091292 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wh45q" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c534fd9f-3767-4be6-a84e-45260fe2042f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c1daa82f1b7e2909d26e662848abe7adaa29361f528ea2efac340bfd685bb155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n2mj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6b629ad4121cddae85f73b40e36013ebfd8d41e9f990dba444bdd2e51d8f9d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n2mj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:50Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wh45q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:35Z is after 2025-08-24T17:21:41Z" Sep 30 
09:50:35 crc kubenswrapper[4730]: I0930 09:50:35.101909 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:35 crc kubenswrapper[4730]: I0930 09:50:35.101950 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:35 crc kubenswrapper[4730]: I0930 09:50:35.101959 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:35 crc kubenswrapper[4730]: I0930 09:50:35.101974 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:35 crc kubenswrapper[4730]: I0930 09:50:35.101985 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:35Z","lastTransitionTime":"2025-09-30T09:50:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:35 crc kubenswrapper[4730]: I0930 09:50:35.107065 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"14260296-7de2-4c79-8afc-9472b7132a27\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac2d1a0a531df208cd612ed7da358209e05fb5a21f658ca2d1ab62720db094e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9d7ccc34d02e190d685a52bfa797834b585dfe919856b9af92419fa4e6e3c8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d
7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e42db724f492a056ad52bf50f43c9603664c4307a8f0c01d2d83d36f644c134c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5305f42ae8fc9005ec4726813a881f7b86ab167dd060238bbde4c9af5eb2c010\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://009056b5d735b1cd232be92cb1bfae18ea1e4e6f5b06185d424bba38ef57ade7\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T09:49:30Z\\\",\\\"message\\\":\\\"W0930 09:49:19.606005 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0930 09:49:19.606415 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759225759 cert, and key in /tmp/serving-cert-1864340106/serving-signer.crt, /tmp/serving-cert-1864340106/serving-signer.key\\\\nI0930 09:49:19.940795 1 observer_polling.go:159] Starting file observer\\\\nW0930 09:49:19.943973 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0930 09:49:19.944252 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 09:49:19.946386 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1864340106/tls.crt::/tmp/serving-cert-1864340106/tls.key\\\\\\\"\\\\nF0930 09:49:30.331930 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2da24b18038fc740ead0fe99d18f91c87014548941763cf0ef15284ed5eb228\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://674fc8de60d8bca93dc0d54d5e12664780d58b8b4c5eaf314e923c2f0644e11b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://674fc8de60d8bca93dc0d54d5e12664780d58b8b4c5eaf314e923c2f0644e11b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:16Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:35Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:35 crc kubenswrapper[4730]: I0930 09:50:35.121036 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"17c9fe87-51d8-4e94-b0aa-10f3112788c6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a687cd54db44118582a4e1b57fc64241860ae1cfdc202ffd5dccc517967e21f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://95fa75e3d43d0b4c4f3ddcb59fc3bd9fb3a75647e5fedd92463bdfaf6c3db722\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://758b57b21f203bcb20f4e3a3b20c6de8696ba56b4e0eff6a31faaf6879a3e7b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba42c83f0075e3d0f10f24307dd574c42a505351503a8794bbcee9e986ec80e3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:16Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:35Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:35 crc kubenswrapper[4730]: I0930 09:50:35.205069 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:35 crc kubenswrapper[4730]: I0930 09:50:35.205115 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:35 crc kubenswrapper[4730]: I0930 09:50:35.205126 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:35 crc kubenswrapper[4730]: I0930 09:50:35.205145 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:35 crc kubenswrapper[4730]: I0930 09:50:35.205157 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:35Z","lastTransitionTime":"2025-09-30T09:50:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:35 crc kubenswrapper[4730]: I0930 09:50:35.308071 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:35 crc kubenswrapper[4730]: I0930 09:50:35.308392 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:35 crc kubenswrapper[4730]: I0930 09:50:35.308402 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:35 crc kubenswrapper[4730]: I0930 09:50:35.308416 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:35 crc kubenswrapper[4730]: I0930 09:50:35.308426 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:35Z","lastTransitionTime":"2025-09-30T09:50:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:35 crc kubenswrapper[4730]: I0930 09:50:35.380664 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-dqqrb" Sep 30 09:50:35 crc kubenswrapper[4730]: E0930 09:50:35.380841 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-dqqrb" podUID="be86a67e-c663-4551-9ecf-a8c2a9801cd7" Sep 30 09:50:35 crc kubenswrapper[4730]: I0930 09:50:35.412264 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:35 crc kubenswrapper[4730]: I0930 09:50:35.412925 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:35 crc kubenswrapper[4730]: I0930 09:50:35.412951 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:35 crc kubenswrapper[4730]: I0930 09:50:35.412978 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:35 crc kubenswrapper[4730]: I0930 09:50:35.412994 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:35Z","lastTransitionTime":"2025-09-30T09:50:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:35 crc kubenswrapper[4730]: I0930 09:50:35.515978 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:35 crc kubenswrapper[4730]: I0930 09:50:35.516022 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:35 crc kubenswrapper[4730]: I0930 09:50:35.516037 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:35 crc kubenswrapper[4730]: I0930 09:50:35.516055 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:35 crc kubenswrapper[4730]: I0930 09:50:35.516067 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:35Z","lastTransitionTime":"2025-09-30T09:50:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:35 crc kubenswrapper[4730]: I0930 09:50:35.619186 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:35 crc kubenswrapper[4730]: I0930 09:50:35.619672 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:35 crc kubenswrapper[4730]: I0930 09:50:35.619757 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:35 crc kubenswrapper[4730]: I0930 09:50:35.619827 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:35 crc kubenswrapper[4730]: I0930 09:50:35.619899 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:35Z","lastTransitionTime":"2025-09-30T09:50:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:35 crc kubenswrapper[4730]: I0930 09:50:35.722795 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:35 crc kubenswrapper[4730]: I0930 09:50:35.723079 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:35 crc kubenswrapper[4730]: I0930 09:50:35.723212 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:35 crc kubenswrapper[4730]: I0930 09:50:35.723314 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:35 crc kubenswrapper[4730]: I0930 09:50:35.723395 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:35Z","lastTransitionTime":"2025-09-30T09:50:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:35 crc kubenswrapper[4730]: I0930 09:50:35.825868 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:35 crc kubenswrapper[4730]: I0930 09:50:35.826173 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:35 crc kubenswrapper[4730]: I0930 09:50:35.826282 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:35 crc kubenswrapper[4730]: I0930 09:50:35.826455 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:35 crc kubenswrapper[4730]: I0930 09:50:35.826557 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:35Z","lastTransitionTime":"2025-09-30T09:50:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:35 crc kubenswrapper[4730]: I0930 09:50:35.860368 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-c5vmh_823c4c28-801d-421e-b15f-02a17e300753/ovnkube-controller/3.log" Sep 30 09:50:35 crc kubenswrapper[4730]: I0930 09:50:35.861143 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-c5vmh_823c4c28-801d-421e-b15f-02a17e300753/ovnkube-controller/2.log" Sep 30 09:50:35 crc kubenswrapper[4730]: I0930 09:50:35.864273 4730 generic.go:334] "Generic (PLEG): container finished" podID="823c4c28-801d-421e-b15f-02a17e300753" containerID="380a34335df704ec5f2fb810525f314739ef1efe35a54dd9e23f95b66389a669" exitCode=1 Sep 30 09:50:35 crc kubenswrapper[4730]: I0930 09:50:35.864377 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" event={"ID":"823c4c28-801d-421e-b15f-02a17e300753","Type":"ContainerDied","Data":"380a34335df704ec5f2fb810525f314739ef1efe35a54dd9e23f95b66389a669"} Sep 30 09:50:35 crc kubenswrapper[4730]: I0930 09:50:35.864438 4730 scope.go:117] "RemoveContainer" containerID="920e864b3b47f25ccbf4d23e865008517a888ed00df9d5e474a1d43ceac0faf1" Sep 30 09:50:35 crc kubenswrapper[4730]: I0930 09:50:35.865058 4730 scope.go:117] "RemoveContainer" containerID="380a34335df704ec5f2fb810525f314739ef1efe35a54dd9e23f95b66389a669" Sep 30 09:50:35 crc kubenswrapper[4730]: E0930 09:50:35.865225 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-c5vmh_openshift-ovn-kubernetes(823c4c28-801d-421e-b15f-02a17e300753)\"" pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" podUID="823c4c28-801d-421e-b15f-02a17e300753" Sep 30 09:50:35 crc kubenswrapper[4730]: I0930 09:50:35.888259 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:35Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:35 crc kubenswrapper[4730]: I0930 09:50:35.901148 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:35Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:35 crc kubenswrapper[4730]: I0930 09:50:35.913416 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-t2frc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"98a6f8df-1ac8-4652-8074-90cb180311ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ed00cdb0dca4ffa70594334507af7834b99fd93be39a245a3b569f39154c2a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca62256374b9da6cb7b82db90ee6d121b072440c000bc7dfb04e763224cf666d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T09:50:26Z\\\",\\\"message\\\":\\\"2025-09-30T09:49:40+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_9e508620-01ca-40f6-9692-76b42e358278\\\\n2025-09-30T09:49:40+00:00 [cnibincopy] 
Successfully moved files in /host/opt/cni/bin/upgrade_9e508620-01ca-40f6-9692-76b42e358278 to /host/opt/cni/bin/\\\\n2025-09-30T09:49:41Z [verbose] multus-daemon started\\\\n2025-09-30T09:49:41Z [verbose] Readiness Indicator file check\\\\n2025-09-30T09:50:26Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:50:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-txcfr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-t2frc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:35Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:35 crc kubenswrapper[4730]: I0930 09:50:35.930220 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:35 crc kubenswrapper[4730]: I0930 09:50:35.930291 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:35 crc kubenswrapper[4730]: I0930 09:50:35.930305 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:35 crc kubenswrapper[4730]: 
I0930 09:50:35.930324 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:35 crc kubenswrapper[4730]: I0930 09:50:35.930334 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:35Z","lastTransitionTime":"2025-09-30T09:50:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:35 crc kubenswrapper[4730]: I0930 09:50:35.931185 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"823c4c28-801d-421e-b15f-02a17e300753\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://566f950ac85f82874c332e01386898784e3b3e37ee3c422f8b73f6e732cd4541\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f988662b16857e74e7c88eb9e69279e7b5807aa2831cdd6ed9a7cab0a6b2494\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d99f5927ded358669fa88d47fb6a8d8bad2808bfdd07920056227828e478eda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fbe6a18bf72a902960dfc551de099c3956696ca5ac78ff5afc30a93768be323\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd7abb6daba8cf38e3d8cc62ab526acb2fe714e07477eb5d1ecf77e145cf60dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d525f8baccabf7bc3b8067dbbc59d76851878ccc48a9fbe61c637b6fe57e01d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://380a34335df704ec5f2fb810525f314739ef1efe
35a54dd9e23f95b66389a669\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://920e864b3b47f25ccbf4d23e865008517a888ed00df9d5e474a1d43ceac0faf1\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T09:50:07Z\\\",\\\"message\\\":\\\"/multus-additional-cni-plugins-p4xvk\\\\nF0930 09:50:07.486769 6349 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:07Z is after 2025-08-24T17:21:41Z]\\\\nI0930 09:50:07.486778 6349 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-console/networking-console-plugin-85b44fc459-gdk6g\\\\nI0930 09:50:07.486723 6349 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-machine-config-operator/machine-config-operator]} name:Service_openshift-machine-config-operator/machine-config-operator_TCP_cluster o\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T09:50:06Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://380a34335df704ec5f2fb810525f314739ef1efe35a54dd9e23f95b66389a669\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T09:50:35Z\\\",\\\"message\\\":\\\"er-wide LB for network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-dns-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-dns-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.174\\\\\\\", Port:9393, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0930 09:50:35.219475 6699 services_controller.go:452] Built service openshift-dns-operator/metrics per-node LB for network=default: []services.LB{}\\\\nF0930 09:50:35.219478 6699 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during 
admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T09:50:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59538b49ec4037e4bd1ccb035a0263f32db2504037aca2ab484dc9323f80b5d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\"
:\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4fcd3f6fd645e486e741ae4ddea7a8669c849d729860c651131da25638a393f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4fcd3f6fd645e486e741ae4ddea7a8669c849d729860c651131da25638a393f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-c5vmh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:35Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:35 crc kubenswrapper[4730]: I0930 09:50:35.941217 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-dqqrb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be86a67e-c663-4551-9ecf-a8c2a9801cd7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9sm8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9sm8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:51Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-dqqrb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:35Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:35 crc kubenswrapper[4730]: I0930 09:50:35.951376 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a3f577f-035f-49c0-bc29-3b7849a2214f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04522c3f227c31fd9dc48f62caeaa83a5e3c3d9ef7a60e33e6a20f41ecafdf5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1bb754504953177858ef8742f7c3889393c8478b60edfaed19f1944bf2b6e0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a1bb754504953177858ef8742f7c3889393c8478b60edfaed19f1944bf2b6e0f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:35Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:35 crc kubenswrapper[4730]: I0930 09:50:35.963334 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d4482712-8a6c-4e35-8e85-0777588ab827\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d84dba8205ee9f4469b0277711b38ff82a1068b3f1cd951ea4a4eefab51ba88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://58b48296b6764c18fc1b5163c1ef503b124b97f11f287fbbbc5b1144ecd06bbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://461de41b45c19fbaf8e606cd30458759972ca804bac7e4cd5495efd8f30280fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0abf086d2b6dabfe2b74ed6b31b94bbd61222698aeb420775118dcca92243bbd\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0abf086d2b6dabfe2b74ed6b31b94bbd61222698aeb420775118dcca92243bbd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:17Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:16Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:35Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:35 crc kubenswrapper[4730]: I0930 09:50:35.978469 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:35Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:35 crc kubenswrapper[4730]: I0930 09:50:35.993119 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://96ab57c0ead54e1fe3bbf9a7fe3fc8375d1e5ebd29840f01bb2022de2f3bfdc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://373aa4d8084c958728b4a3f16056068fb328a59e55a5f50785647d78fac0e424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:35Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:36 crc kubenswrapper[4730]: I0930 09:50:36.008674 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-p4xvk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9871bed2-69f9-44f1-ab80-f8b4b9241e73\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19b882d14f4eaee6c6ad8cdd30c6e1e63eb837c7bc2dd31974c37ffbc3307186\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84aa64a838e41f5db6010538528a9d8d9221b5fe859e1afec9d9d3c71ef90eb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://84aa64a838e41f5db6010538528a9d8d9221b5fe859e1afec9d9d3c71ef90eb7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"start
edAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://618950d5ff8ec1ef34dc0f43e73a265aefbeb97f69a25226fb2007b4d28249a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://618950d5ff8ec1ef34dc0f43e73a265aefbeb97f69a25226fb2007b4d28249a7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://849eb24b3a7e62a7667c303069953f1391cdebf8f2888dc7d3967667c31c2a3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://849eb24b3a7e62a7667c303069953f1391cdebf8f2888dc7d3967667c31c2a3e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://685162f0c76d93c0aa78dc2c5c0ee794627f665afa60af9d9d0ae3ba8f5f7b59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\"
:\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://685162f0c76d93c0aa78dc2c5c0ee794627f665afa60af9d9d0ae3ba8f5f7b59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c92fbb212b1ad8be488e639f7f88263b3d96ab0320da532d908b3c9df59ef2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c92fbb212b1ad8be488e639f7f88263b3d96ab0320da532d908b3c9df59ef2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://840f2a769ccdfba8933c685d07c325cf84d38bccf1d317c3b664f298aa52d878\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://840f2a769ccdfba8933c685d07c325cf84d38bccf1d317c3b664f298aa52d878\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.1
68.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-p4xvk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:36Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:36 crc kubenswrapper[4730]: I0930 09:50:36.026357 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"14260296-7de2-4c79-8afc-9472b7132a27\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac2d1a0a531df208cd612ed7da358209e05fb5a21f658ca2d1ab62720db094e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9d7ccc34d02e190d685a52bfa797834b585dfe919856b9af92419fa4e6e3c8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e42db724f492a056ad52bf50f43c9603664c4307a8f0c01d2d83d36f644c134c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-clust
er-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5305f42ae8fc9005ec4726813a881f7b86ab167dd060238bbde4c9af5eb2c010\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://009056b5d735b1cd232be92cb1bfae18ea1e4e6f5b06185d424bba38ef57ade7\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T09:49:30Z\\\",\\\"message\\\":\\\"W0930 09:49:19.606005 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0930 09:49:19.606415 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759225759 cert, and key in /tmp/serving-cert-1864340106/serving-signer.crt, /tmp/serving-cert-1864340106/serving-signer.key\\\\nI0930 09:49:19.940795 1 observer_polling.go:159] Starting file observer\\\\nW0930 09:49:19.943973 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0930 09:49:19.944252 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 09:49:19.946386 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1864340106/tls.crt::/tmp/serving-cert-1864340106/tls.key\\\\\\\"\\\\nF0930 09:49:30.331930 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2da24b18038fc740ead0fe99d18f91c87014548941763cf0ef15284ed5eb228\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://674fc8de60d8bca93dc0d54d5e12664780d58b8b4c5eaf314e923c2f0644e11b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://674fc8de60d8bca93dc0d54d5e12664780d58b8b4c5eaf314e923c2f0644e11b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:16Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:36Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:36 crc kubenswrapper[4730]: I0930 09:50:36.032606 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:36 crc kubenswrapper[4730]: I0930 09:50:36.032889 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:36 crc kubenswrapper[4730]: I0930 09:50:36.033005 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:36 crc kubenswrapper[4730]: I0930 09:50:36.033072 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:36 crc kubenswrapper[4730]: I0930 09:50:36.033162 4730 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:36Z","lastTransitionTime":"2025-09-30T09:50:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:36 crc kubenswrapper[4730]: I0930 09:50:36.041402 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"17c9fe87-51d8-4e94-b0aa-10f3112788c6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a687cd54db44118582a4e1b57fc64241860ae1cfdc202ffd5dccc517967e21f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://95fa75e3d43d0b4c4f3ddcb59fc3bd9fb3a75647e5fedd92463bdfaf6c3db722\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://758b57b21f203bcb20f4e3a3b20c6de8696ba56b4e0eff6a31faaf6879a3e7b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastS
tate\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba42c83f0075e3d0f10f24307dd574c42a505351503a8794bbcee9e986ec80e3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:16Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:36Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:36 crc kubenswrapper[4730]: I0930 09:50:36.059725 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1917c40c0ff4b4b6a2389c465a4887bad0db0b5b86a8715e0c790b3e0acfb973\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:36Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:36 crc kubenswrapper[4730]: I0930 09:50:36.075327 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e32477c6ee071d9e994825da575711cc75f177c998dd5fd170be07551b40e049\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:36Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:36 crc kubenswrapper[4730]: I0930 09:50:36.090001 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"95bd4436-8399-478d-9552-c9ba5ae8f327\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b7761b460113a85528a11a7475d5245ce3009a0dd413a8deec822c6c0eac07b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9d975\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://defac85c6bf34a3ea262b1ad293516b72b27c3f3e328f3f970694bb978bf90a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9d975\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d4zf9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:36Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:36 crc kubenswrapper[4730]: I0930 09:50:36.099899 4730 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-nw55k" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c0a99445-b60e-4c47-b4ae-00b9983d8a15\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b566e571569572866ddcfc76684ec3f1fdabc0f890aa5fd8588d136b890e88ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qdpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:40Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nw55k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:36Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:36 crc kubenswrapper[4730]: I0930 09:50:36.109970 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wh45q" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c534fd9f-3767-4be6-a84e-45260fe2042f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c1daa82f1b7e2909d26e662848abe7adaa29361f528ea2efac340bfd685bb155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n2mj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6b629ad4121cddae85f73b40e36013ebfd8d41e9f990dba444bdd2e51d8f9d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n2mj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:50Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wh45q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:36Z is after 2025-08-24T17:21:41Z" Sep 30 
09:50:36 crc kubenswrapper[4730]: I0930 09:50:36.120032 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s64nf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a7e6b85-ac68-4da9-b7eb-b5a936f639df\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a77da47c8d44b3aae7f4e27fcc4df190549c2d881e0969dbe3b8951103477ba3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6rknv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s64nf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:36Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:36 crc kubenswrapper[4730]: I0930 09:50:36.136593 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:36 crc kubenswrapper[4730]: I0930 09:50:36.136662 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:36 crc kubenswrapper[4730]: I0930 09:50:36.136673 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:36 crc kubenswrapper[4730]: I0930 09:50:36.136691 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:36 crc kubenswrapper[4730]: I0930 09:50:36.136899 4730 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:36Z","lastTransitionTime":"2025-09-30T09:50:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:36 crc kubenswrapper[4730]: I0930 09:50:36.239994 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:36 crc kubenswrapper[4730]: I0930 09:50:36.240047 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:36 crc kubenswrapper[4730]: I0930 09:50:36.240060 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:36 crc kubenswrapper[4730]: I0930 09:50:36.240080 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:36 crc kubenswrapper[4730]: I0930 09:50:36.240096 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:36Z","lastTransitionTime":"2025-09-30T09:50:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:36 crc kubenswrapper[4730]: I0930 09:50:36.343042 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:36 crc kubenswrapper[4730]: I0930 09:50:36.343077 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:36 crc kubenswrapper[4730]: I0930 09:50:36.343085 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:36 crc kubenswrapper[4730]: I0930 09:50:36.343100 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:36 crc kubenswrapper[4730]: I0930 09:50:36.343109 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:36Z","lastTransitionTime":"2025-09-30T09:50:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:36 crc kubenswrapper[4730]: I0930 09:50:36.380906 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 09:50:36 crc kubenswrapper[4730]: E0930 09:50:36.381103 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 09:50:36 crc kubenswrapper[4730]: I0930 09:50:36.381267 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 09:50:36 crc kubenswrapper[4730]: I0930 09:50:36.381399 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 09:50:36 crc kubenswrapper[4730]: E0930 09:50:36.381432 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 09:50:36 crc kubenswrapper[4730]: E0930 09:50:36.381589 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 09:50:36 crc kubenswrapper[4730]: I0930 09:50:36.397203 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"95bd4436-8399-478d-9552-c9ba5ae8f327\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b7761b460113a85528a11a7475d5245ce3009a0dd413a8deec822c6c0eac07b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9d975\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://defac85c6bf34a3ea262b1ad293516b72b27c3f3e328f3f970694bb978bf90a6\\\",\\\"image\\\":\\\"quay.io/o
penshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9d975\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d4zf9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:36Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:36 crc kubenswrapper[4730]: I0930 09:50:36.409839 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-nw55k" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c0a99445-b60e-4c47-b4ae-00b9983d8a15\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b566e571569572866ddcfc76684ec3f1fdabc0f890aa5fd8588d136b890e88ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qdpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\
\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:40Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nw55k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:36Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:36 crc kubenswrapper[4730]: I0930 09:50:36.423943 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wh45q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c534fd9f-3767-4be6-a84e-45260fe2042f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c1daa82f1b7e2909d26e662848abe7adaa29361f528ea2efac340bfd685bb155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n2mj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6b629ad4121cddae85f73b40e36013ebfd8d41e9f990dba444bdd2e51d8f9d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/servi
ceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n2mj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:50Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wh45q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:36Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:36 crc kubenswrapper[4730]: I0930 09:50:36.442087 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"14260296-7de2-4c79-8afc-9472b7132a27\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac2d1a0a531df208cd612ed7da358209e05fb5a21f658ca2d1ab62720db094e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9d7ccc34d02e190d685a52bfa797834b585dfe919856b9af92419fa4e6e3c8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-
o://e42db724f492a056ad52bf50f43c9603664c4307a8f0c01d2d83d36f644c134c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5305f42ae8fc9005ec4726813a881f7b86ab167dd060238bbde4c9af5eb2c010\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://009056b5d735b1cd232be92cb1bfae18ea1e4e6f5b06185d424bba38ef57ade7\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T09:49:30Z\\\",\\\"message\\\":\\\"W0930 09:49:19.606005 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0930 09:49:19.606415 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759225759 cert, and key in /tmp/serving-cert-1864340106/serving-signer.crt, /tmp/serving-cert-1864340106/serving-signer.key\\\\nI0930 09:49:19.940795 1 observer_polling.go:159] Starting file observer\\\\nW0930 09:49:19.943973 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0930 09:49:19.944252 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 09:49:19.946386 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1864340106/tls.crt::/tmp/serving-cert-1864340106/tls.key\\\\\\\"\\\\nF0930 09:49:30.331930 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2da24b18038fc740ead0fe99d18f91c87014548941763cf0ef15284ed5eb228\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://674fc8de60d8bca93dc0d54d5e12664780d58b8b4c5eaf314e923c2f0644e11b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://674fc8de60d8bca93dc0d54d5e12664780d58b8b4c5eaf314e923c2f0644e11b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:16Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:36Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:36 crc kubenswrapper[4730]: I0930 09:50:36.449832 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:36 crc kubenswrapper[4730]: I0930 09:50:36.449893 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:36 crc kubenswrapper[4730]: I0930 09:50:36.449909 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:36 crc kubenswrapper[4730]: I0930 09:50:36.449941 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:36 crc kubenswrapper[4730]: I0930 09:50:36.449957 4730 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:36Z","lastTransitionTime":"2025-09-30T09:50:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:36 crc kubenswrapper[4730]: I0930 09:50:36.456228 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"17c9fe87-51d8-4e94-b0aa-10f3112788c6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a687cd54db44118582a4e1b57fc64241860ae1cfdc202ffd5dccc517967e21f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://95fa75e3d43d0b4c4f3ddcb59fc3bd9fb3a75647e5fedd92463bdfaf6c3db722\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://758b57b21f203bcb20f4e3a3b20c6de8696ba56b4e0eff6a31faaf6879a3e7b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastS
tate\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba42c83f0075e3d0f10f24307dd574c42a505351503a8794bbcee9e986ec80e3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:16Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:36Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:36 crc kubenswrapper[4730]: I0930 09:50:36.470698 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1917c40c0ff4b4b6a2389c465a4887bad0db0b5b86a8715e0c790b3e0acfb973\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:36Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:36 crc kubenswrapper[4730]: I0930 09:50:36.484505 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e32477c6ee071d9e994825da575711cc75f177c998dd5fd170be07551b40e049\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:36Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:36 crc kubenswrapper[4730]: I0930 09:50:36.495442 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s64nf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a7e6b85-ac68-4da9-b7eb-b5a936f639df\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a77da47c8d44b3aae7f4e27fcc4df190549c2d881e0969dbe3b8951103477ba3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6rknv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s64nf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:36Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:36 crc kubenswrapper[4730]: I0930 09:50:36.516403 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"823c4c28-801d-421e-b15f-02a17e300753\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://566f950ac85f82874c332e01386898784e3b3e37ee3c422f8b73f6e732cd4541\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f988662b16857e74e7c88eb9e69279e7b5807aa2831cdd6ed9a7cab0a6b2494\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d99f5927ded358669fa88d47fb6a8d8bad2808bfdd07920056227828e478eda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fbe6a18bf72a902960dfc551de099c3956696ca5ac78ff5afc30a93768be323\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd7abb6daba8cf38e3d8cc62ab526acb2fe714e07477eb5d1ecf77e145cf60dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d525f8baccabf7bc3b8067dbbc59d76851878ccc48a9fbe61c637b6fe57e01d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://380a34335df704ec5f2fb810525f314739ef1efe35a54dd9e23f95b66389a669\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://920e864b3b47f25ccbf4d23e865008517a888ed00df9d5e474a1d43ceac0faf1\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T09:50:07Z\\\",\\\"message\\\":\\\"/multus-additional-cni-plugins-p4xvk\\\\nF0930 09:50:07.486769 6349 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:07Z is after 2025-08-24T17:21:41Z]\\\\nI0930 09:50:07.486778 6349 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-console/networking-console-plugin-85b44fc459-gdk6g\\\\nI0930 09:50:07.486723 6349 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-machine-config-operator/machine-config-operator]} name:Service_openshift-machine-config-operator/machine-config-operator_TCP_cluster o\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T09:50:06Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://380a34335df704ec5f2fb810525f314739ef1efe35a54dd9e23f95b66389a669\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T09:50:35Z\\\",\\\"message\\\":\\\"er-wide LB for network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-dns-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-dns-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.174\\\\\\\", Port:9393, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0930 09:50:35.219475 6699 services_controller.go:452] Built service openshift-dns-operator/metrics per-node LB for network=default: 
[]services.LB{}\\\\nF0930 09:50:35.219478 6699 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T09:50:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59538b49ec4037e4bd1ccb035a0263f32db2504037aca2ab484dc9323f80b5d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log
\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4fcd3f6fd645e486e741ae4ddea7a8669c849d729860c651131da25638a393f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4fcd3f6fd645e486e741ae4ddea7a8669c849d729860c651131da25638a393f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-c5vmh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:36Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:36 crc kubenswrapper[4730]: I0930 09:50:36.530273 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-dqqrb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be86a67e-c663-4551-9ecf-a8c2a9801cd7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9sm8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9sm8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:51Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-dqqrb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:36Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:36 crc kubenswrapper[4730]: I0930 09:50:36.545327 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:36Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:36 crc kubenswrapper[4730]: I0930 09:50:36.552828 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:36 crc kubenswrapper[4730]: I0930 09:50:36.552915 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:36 crc kubenswrapper[4730]: I0930 09:50:36.552929 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:36 crc kubenswrapper[4730]: I0930 09:50:36.552953 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:36 crc kubenswrapper[4730]: I0930 09:50:36.552971 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:36Z","lastTransitionTime":"2025-09-30T09:50:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:36 crc kubenswrapper[4730]: I0930 09:50:36.560347 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:36Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:36 crc kubenswrapper[4730]: I0930 09:50:36.574455 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-t2frc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"98a6f8df-1ac8-4652-8074-90cb180311ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ed00cdb0dca4ffa70594334507af7834b99fd93be39a245a3b569f39154c2a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca62256374b9da6cb7b82db90ee6d121b072440c000bc7dfb04e763224cf666d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T09:50:26Z\\\",\\\"message\\\":\\\"2025-09-30T09:49:40+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_9e508620-01ca-40f6-9692-76b42e358278\\\\n2025-09-30T09:49:40+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_9e508620-01ca-40f6-9692-76b42e358278 to /host/opt/cni/bin/\\\\n2025-09-30T09:49:41Z [verbose] multus-daemon started\\\\n2025-09-30T09:49:41Z [verbose] Readiness Indicator file check\\\\n2025-09-30T09:50:26Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:50:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-txcfr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-t2frc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:36Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:36 crc kubenswrapper[4730]: I0930 09:50:36.592100 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-p4xvk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9871bed2-69f9-44f1-ab80-f8b4b9241e73\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19b882d14f4eaee6c6ad8cdd30c6e1e63eb837c7bc2dd31974c37ffbc3307186\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84aa64a838e41f5db6010538528a9d8d9221b5fe859e1afec9d9d3c71ef90eb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://84aa64a838e41f5db6010538528a9d8d9221b5fe859e1afec9d9d3c71ef90eb7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://618950d5ff8ec1ef34dc0f43e73a265aefbeb97f69a25226fb2007b4d28249a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://618950d5ff8ec1ef34dc0f43e73a265aefbeb97f69a25226fb2007b4d28249a7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://849eb24b3a7e62a7667c303069953f1391cdebf8f2888dc7d3967667c31c2a3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://849eb24b3a7e62a7667c303069953f1391cdebf8f2888dc7d3967667c31c2a3e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://685162f0c76d93c0aa78dc2c5c0ee794627f665afa60af9d9d0ae3ba8f5f7b59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://685162f0c76d93c0aa78dc2c5c0ee794627f665afa60af9d9d0ae3ba8f5f7b59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c92fbb212b1ad8be488e639f7f88263b3d96ab0320da532d908b3c9df59ef2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c92fbb212b1ad8be488e639f7f88263b3d96ab0320da532d908b3c9df59ef2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://840f2a769ccdfba8933c685d07c325cf84d38bccf1d317c3b664f298aa52d878\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://840f2a769ccdfba8933c685d07c325cf84d38bccf1d317c3b664f298aa52d878\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-p4xvk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:36Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:36 crc kubenswrapper[4730]: I0930 09:50:36.606901 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a3f577f-035f-49c0-bc29-3b7849a2214f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04522c3f227c31fd9dc48f62caeaa83a5e3c3d9ef7a60e33e6a20f41ecafdf5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1bb754504953177858ef8742f7c3889393c8478b60edfaed19f1944bf2b6e0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a1bb754504953177858ef8742f7c3889393c8478b60edfaed19f1944bf2b6e0f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:36Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:36 crc kubenswrapper[4730]: I0930 09:50:36.620912 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d4482712-8a6c-4e35-8e85-0777588ab827\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d84dba8205ee9f4469b0277711b38ff82a1068b3f1cd951ea4a4eefab51ba88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://58b48296b6764c18fc1b5163c1ef503b124b97f11f287fbbbc5b1144ecd06bbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://461de41b45c19fbaf8e606cd30458759972ca804bac7e4cd5495efd8f30280fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0abf086d2b6dabfe2b74ed6b31b94bbd61222698aeb420775118dcca92243bbd\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0abf086d2b6dabfe2b74ed6b31b94bbd61222698aeb420775118dcca92243bbd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:17Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:16Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:36Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:36 crc kubenswrapper[4730]: I0930 09:50:36.636440 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:36Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:36 crc kubenswrapper[4730]: I0930 09:50:36.652877 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://96ab57c0ead54e1fe3bbf9a7fe3fc8375d1e5ebd29840f01bb2022de2f3bfdc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://373aa4d8084c958728b4a3f16056068fb328a59e55a5f50785647d78fac0e424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:36Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:36 crc kubenswrapper[4730]: I0930 09:50:36.655744 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:36 crc kubenswrapper[4730]: I0930 09:50:36.655793 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:36 crc kubenswrapper[4730]: I0930 09:50:36.655808 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:36 crc kubenswrapper[4730]: I0930 09:50:36.655833 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:36 crc kubenswrapper[4730]: I0930 09:50:36.655850 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:36Z","lastTransitionTime":"2025-09-30T09:50:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:36 crc kubenswrapper[4730]: I0930 09:50:36.758038 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:36 crc kubenswrapper[4730]: I0930 09:50:36.758130 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:36 crc kubenswrapper[4730]: I0930 09:50:36.758142 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:36 crc kubenswrapper[4730]: I0930 09:50:36.758159 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:36 crc kubenswrapper[4730]: I0930 09:50:36.758176 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:36Z","lastTransitionTime":"2025-09-30T09:50:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:36 crc kubenswrapper[4730]: I0930 09:50:36.861363 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:36 crc kubenswrapper[4730]: I0930 09:50:36.861427 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:36 crc kubenswrapper[4730]: I0930 09:50:36.861445 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:36 crc kubenswrapper[4730]: I0930 09:50:36.861473 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:36 crc kubenswrapper[4730]: I0930 09:50:36.861490 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:36Z","lastTransitionTime":"2025-09-30T09:50:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:36 crc kubenswrapper[4730]: I0930 09:50:36.868558 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-c5vmh_823c4c28-801d-421e-b15f-02a17e300753/ovnkube-controller/3.log" Sep 30 09:50:36 crc kubenswrapper[4730]: I0930 09:50:36.872308 4730 scope.go:117] "RemoveContainer" containerID="380a34335df704ec5f2fb810525f314739ef1efe35a54dd9e23f95b66389a669" Sep 30 09:50:36 crc kubenswrapper[4730]: E0930 09:50:36.872529 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-c5vmh_openshift-ovn-kubernetes(823c4c28-801d-421e-b15f-02a17e300753)\"" pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" podUID="823c4c28-801d-421e-b15f-02a17e300753" Sep 30 09:50:36 crc kubenswrapper[4730]: I0930 09:50:36.885495 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s64nf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a7e6b85-ac68-4da9-b7eb-b5a936f639df\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a77da47c8d44b3aae7f4e27fcc4df190549c2d881e0969dbe3b8951103477ba3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6rknv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s64nf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:36Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:36 crc kubenswrapper[4730]: I0930 09:50:36.898628 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:36Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:36 crc kubenswrapper[4730]: I0930 09:50:36.914915 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-t2frc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"98a6f8df-1ac8-4652-8074-90cb180311ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ed00cdb0dca4ffa70594334507af7834b99fd93be39a245a3b569f39154c2a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca62256374b9da6cb7b82db90ee6d121b072440c000bc7dfb04e763224cf666d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T09:50:26Z\\\",\\\"message\\\":\\\"2025-09-30T09:49:40+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_9e508620-01ca-40f6-9692-76b42e358278\\\\n2025-09-30T09:49:40+00:00 [cnibincopy] 
Successfully moved files in /host/opt/cni/bin/upgrade_9e508620-01ca-40f6-9692-76b42e358278 to /host/opt/cni/bin/\\\\n2025-09-30T09:49:41Z [verbose] multus-daemon started\\\\n2025-09-30T09:49:41Z [verbose] Readiness Indicator file check\\\\n2025-09-30T09:50:26Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:50:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-txcfr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-t2frc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:36Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:36 crc kubenswrapper[4730]: I0930 09:50:36.939067 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"823c4c28-801d-421e-b15f-02a17e300753\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://566f950ac85f82874c332e01386898784e3b3e37ee3c422f8b73f6e732cd4541\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f988662b16857e74e7c88eb9e69279e7b5807aa2831cdd6ed9a7cab0a6b2494\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d99f5927ded358669fa88d47fb6a8d8bad2808bfdd07920056227828e478eda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fbe6a18bf72a902960dfc551de099c3956696ca5ac78ff5afc30a93768be323\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd7abb6daba8cf38e3d8cc62ab526acb2fe714e07477eb5d1ecf77e145cf60dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d525f8baccabf7bc3b8067dbbc59d76851878ccc48a9fbe61c637b6fe57e01d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://380a34335df704ec5f2fb810525f314739ef1efe35a54dd9e23f95b66389a669\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://380a34335df704ec5f2fb810525f314739ef1efe35a54dd9e23f95b66389a669\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T09:50:35Z\\\",\\\"message\\\":\\\"er-wide LB for network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-dns-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-dns-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.174\\\\\\\", Port:9393, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0930 09:50:35.219475 6699 services_controller.go:452] Built service openshift-dns-operator/metrics per-node LB for network=default: []services.LB{}\\\\nF0930 09:50:35.219478 6699 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T09:50:34Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-c5vmh_openshift-ovn-kubernetes(823c4c28-801d-421e-b15f-02a17e300753)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59538b49ec4037e4bd1ccb035a0263f32db2504037aca2ab484dc9323f80b5d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4fcd3f6fd645e486e741ae4ddea7a8669c849d729860c651131da25638a393f\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4fcd3f6fd645e486e741ae4ddea7a8669c849d729860c651131da25638a393f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-c5vmh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:36Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:36 crc kubenswrapper[4730]: I0930 09:50:36.952354 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-dqqrb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be86a67e-c663-4551-9ecf-a8c2a9801cd7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9sm8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9sm8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:51Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-dqqrb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:36Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:36 crc kubenswrapper[4730]: I0930 09:50:36.964507 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:36 crc kubenswrapper[4730]: I0930 09:50:36.964571 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:36 crc kubenswrapper[4730]: I0930 09:50:36.964589 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:36 crc kubenswrapper[4730]: I0930 09:50:36.964656 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:36 crc kubenswrapper[4730]: I0930 09:50:36.964677 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:36Z","lastTransitionTime":"2025-09-30T09:50:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:36 crc kubenswrapper[4730]: I0930 09:50:36.966419 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:36Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:36 crc kubenswrapper[4730]: I0930 09:50:36.985900 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:36Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:37 crc kubenswrapper[4730]: I0930 09:50:37.003759 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://96ab57c0ead54e1fe3bbf9a7fe3fc8375d1e5ebd29840f01bb2022de2f3bfdc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://373aa4d8084c958728b4a3f16056068fb328a59e55a5f50785647d78fac0e424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imag
eID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:37Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:37 crc kubenswrapper[4730]: I0930 09:50:37.020119 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-p4xvk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9871bed2-69f9-44f1-ab80-f8b4b9241e73\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19b882d14f4eaee6c6ad8cdd30c6e1e63eb837c7bc2dd31974c37ffbc3307186\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84aa64a838e41f5db6010538528a9d8d9221b5fe859e1afec9d9d3c71ef90eb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\
":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://84aa64a838e41f5db6010538528a9d8d9221b5fe859e1afec9d9d3c71ef90eb7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://618950d5ff8ec1ef34dc0f43e73a265aefbeb97f69a25226fb2007b4d28249a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://618950d5ff8ec1ef34dc0f43e73a265aefbeb97f69a25226fb2007b4d28249a7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://849eb24b3a7e62a7667c303069953f1391cdebf8f2888dc7d3967667c31c2a3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://849eb24b3a7e62a7667c303069953f1391cdebf8f2888dc7d3967667c31c2a3e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\
\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://685162f0c76d93c0aa78dc2c5c0ee794627f665afa60af9d9d0ae3ba8f5f7b59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://685162f0c76d93c0aa78dc2c5c0ee794627f665afa60af9d9d0ae3ba8f5f7b59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c92fbb212b1ad8be488e639f7f88263b3d96ab0320da532d908b3c9df59ef2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c92fbb212b1ad8be488e639f7f88263b3d96ab0320da532d908b3c9df59ef2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://840f2a769ccdfba8933c685d07c325cf84d38bccf1d317c3b664f298aa52d878\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://840f2a769ccdfba8933c685d07c325cf84d38bccf1d317c3b664f298aa52d878\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:43
Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-p4xvk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:37Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:37 crc kubenswrapper[4730]: I0930 09:50:37.034181 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a3f577f-035f-49c0-bc29-3b7849a2214f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04522c3f227c31fd9dc48f62caeaa83a5e3c3d9ef7a60e33e6a20f41ecafdf5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1bb754504953177858ef8742f7c3889393c8478b60edfaed19f1944bf2b6e0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"contain
erID\\\":\\\"cri-o://a1bb754504953177858ef8742f7c3889393c8478b60edfaed19f1944bf2b6e0f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:37Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:37 crc kubenswrapper[4730]: I0930 09:50:37.048595 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d4482712-8a6c-4e35-8e85-0777588ab827\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d84dba8205ee9f4469b0277711b38ff82a1068b3f1cd951ea4a4eefab51ba88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://58b48296b6764c18fc1b5163c1ef503b124b97f11f287fbbbc5b1144ecd06bbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes
/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://461de41b45c19fbaf8e606cd30458759972ca804bac7e4cd5495efd8f30280fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0abf086d2b6dabfe2b74ed6b31b94bbd61222698aeb420775118dcca92243bbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0abf086d2b6dabfe2b74ed6b31b94bbd61222698aeb420775118dcca92243bbd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:17Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:16Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:37Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:37 crc kubenswrapper[4730]: I0930 09:50:37.061587 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1917c40c0ff4b4b6a2389c465a4887bad0db0b5b86a8715e0c790b3e0acfb973\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:37Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:37 crc kubenswrapper[4730]: I0930 09:50:37.070344 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:37 crc kubenswrapper[4730]: I0930 09:50:37.070393 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:37 crc kubenswrapper[4730]: I0930 09:50:37.070404 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:37 crc kubenswrapper[4730]: I0930 09:50:37.070423 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:37 crc kubenswrapper[4730]: I0930 09:50:37.070436 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:37Z","lastTransitionTime":"2025-09-30T09:50:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:37 crc kubenswrapper[4730]: I0930 09:50:37.078844 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e32477c6ee071d9e994825da575711cc75f177c998dd5fd170be07551b40e049\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:37Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:37 crc kubenswrapper[4730]: I0930 09:50:37.091585 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"95bd4436-8399-478d-9552-c9ba5ae8f327\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b7761b460113a85528a11a7475d5245ce3009a0dd413a8deec822c6c0eac07b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9d975\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://defac85c6bf34a3ea262b1ad293516b72b27c3f3e328f3f970694bb978bf90a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9d975\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d4zf9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:37Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:37 crc kubenswrapper[4730]: I0930 09:50:37.103628 4730 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-nw55k" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c0a99445-b60e-4c47-b4ae-00b9983d8a15\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b566e571569572866ddcfc76684ec3f1fdabc0f890aa5fd8588d136b890e88ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qdpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:40Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nw55k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:37Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:37 crc kubenswrapper[4730]: I0930 09:50:37.117671 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wh45q" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c534fd9f-3767-4be6-a84e-45260fe2042f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c1daa82f1b7e2909d26e662848abe7adaa29361f528ea2efac340bfd685bb155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n2mj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6b629ad4121cddae85f73b40e36013ebfd8d41e9f990dba444bdd2e51d8f9d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n2mj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:50Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wh45q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:37Z is after 2025-08-24T17:21:41Z" Sep 30 
09:50:37 crc kubenswrapper[4730]: I0930 09:50:37.131968 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"14260296-7de2-4c79-8afc-9472b7132a27\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac2d1a0a531df208cd612ed7da358209e05fb5a21f658ca2d1ab62720db094e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9d7ccc34d02e190d685a52bfa797834b585dfe919856b9af92419fa4e6e3c8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e42db724f492a056ad52bf50f43c9603664c4307a8f0c01d2d83d36f644c134c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\
\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5305f42ae8fc9005ec4726813a881f7b86ab167dd060238bbde4c9af5eb2c010\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://009056b5d735b1cd232be92cb1bfae18ea1e4e6f5b06185d424bba38ef57ade7\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T09:49:30Z\\\",\\\"message\\\":\\\"W0930 09:49:19.606005 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0930 09:49:19.606415 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759225759 cert, and key in /tmp/serving-cert-1864340106/serving-signer.crt, /tmp/serving-cert-1864340106/serving-signer.key\\\\nI0930 09:49:19.940795 1 observer_polling.go:159] Starting file observer\\\\nW0930 09:49:19.943973 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0930 09:49:19.944252 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 09:49:19.946386 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1864340106/tls.crt::/tmp/serving-cert-1864340106/tls.key\\\\\\\"\\\\nF0930 09:49:30.331930 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2da24b18038fc740ead0fe99d18f91c87014548941763cf0ef15284ed5eb228\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://674fc8de60d8bca93dc0d54d5e12664780d58b8b4c5eaf314e923c2f0644e11b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://674fc8de60d8bca93dc0d54d5e12664780d58b8b4c5eaf314e923c2f0644e11b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:16Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:37Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:37 crc kubenswrapper[4730]: I0930 09:50:37.144639 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"17c9fe87-51d8-4e94-b0aa-10f3112788c6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a687cd54db44118582a4e1b57fc64241860ae1cfdc202ffd5dccc517967e21f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://95fa75e3d43d0b4c4f3ddcb59fc3bd9fb3a75647e5fedd92463bdfaf6c3db722\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://758b57b21f203bcb20f4e3a3b20c6de8696ba56b4e0eff6a31faaf6879a3e7b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba42c83f0075e3d0f10f24307dd574c42a505351503a8794bbcee9e986ec80e3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:16Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:37Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:37 crc kubenswrapper[4730]: I0930 09:50:37.173172 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:37 crc kubenswrapper[4730]: I0930 09:50:37.173224 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:37 crc kubenswrapper[4730]: I0930 09:50:37.173235 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:37 crc kubenswrapper[4730]: I0930 09:50:37.173255 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:37 crc kubenswrapper[4730]: I0930 09:50:37.173268 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:37Z","lastTransitionTime":"2025-09-30T09:50:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:37 crc kubenswrapper[4730]: I0930 09:50:37.276989 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:37 crc kubenswrapper[4730]: I0930 09:50:37.277065 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:37 crc kubenswrapper[4730]: I0930 09:50:37.277076 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:37 crc kubenswrapper[4730]: I0930 09:50:37.277097 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:37 crc kubenswrapper[4730]: I0930 09:50:37.277109 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:37Z","lastTransitionTime":"2025-09-30T09:50:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:37 crc kubenswrapper[4730]: I0930 09:50:37.379750 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-dqqrb" Sep 30 09:50:37 crc kubenswrapper[4730]: I0930 09:50:37.380198 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:37 crc kubenswrapper[4730]: I0930 09:50:37.380233 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:37 crc kubenswrapper[4730]: I0930 09:50:37.380265 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:37 crc kubenswrapper[4730]: I0930 09:50:37.380281 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:37 crc kubenswrapper[4730]: I0930 09:50:37.380292 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:37Z","lastTransitionTime":"2025-09-30T09:50:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:37 crc kubenswrapper[4730]: E0930 09:50:37.380477 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-dqqrb" podUID="be86a67e-c663-4551-9ecf-a8c2a9801cd7" Sep 30 09:50:37 crc kubenswrapper[4730]: I0930 09:50:37.482561 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:37 crc kubenswrapper[4730]: I0930 09:50:37.482623 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:37 crc kubenswrapper[4730]: I0930 09:50:37.482638 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:37 crc kubenswrapper[4730]: I0930 09:50:37.482657 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:37 crc kubenswrapper[4730]: I0930 09:50:37.482672 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:37Z","lastTransitionTime":"2025-09-30T09:50:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:37 crc kubenswrapper[4730]: I0930 09:50:37.584923 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:37 crc kubenswrapper[4730]: I0930 09:50:37.585002 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:37 crc kubenswrapper[4730]: I0930 09:50:37.585023 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:37 crc kubenswrapper[4730]: I0930 09:50:37.585066 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:37 crc kubenswrapper[4730]: I0930 09:50:37.585086 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:37Z","lastTransitionTime":"2025-09-30T09:50:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:37 crc kubenswrapper[4730]: I0930 09:50:37.688074 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:37 crc kubenswrapper[4730]: I0930 09:50:37.688121 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:37 crc kubenswrapper[4730]: I0930 09:50:37.688131 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:37 crc kubenswrapper[4730]: I0930 09:50:37.688151 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:37 crc kubenswrapper[4730]: I0930 09:50:37.688164 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:37Z","lastTransitionTime":"2025-09-30T09:50:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:37 crc kubenswrapper[4730]: I0930 09:50:37.790653 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:37 crc kubenswrapper[4730]: I0930 09:50:37.790691 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:37 crc kubenswrapper[4730]: I0930 09:50:37.790704 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:37 crc kubenswrapper[4730]: I0930 09:50:37.790723 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:37 crc kubenswrapper[4730]: I0930 09:50:37.790733 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:37Z","lastTransitionTime":"2025-09-30T09:50:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:37 crc kubenswrapper[4730]: I0930 09:50:37.894011 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:37 crc kubenswrapper[4730]: I0930 09:50:37.894063 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:37 crc kubenswrapper[4730]: I0930 09:50:37.894072 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:37 crc kubenswrapper[4730]: I0930 09:50:37.894087 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:37 crc kubenswrapper[4730]: I0930 09:50:37.894097 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:37Z","lastTransitionTime":"2025-09-30T09:50:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:37 crc kubenswrapper[4730]: I0930 09:50:37.997553 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:37 crc kubenswrapper[4730]: I0930 09:50:37.997582 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:37 crc kubenswrapper[4730]: I0930 09:50:37.997590 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:37 crc kubenswrapper[4730]: I0930 09:50:37.997603 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:37 crc kubenswrapper[4730]: I0930 09:50:37.997647 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:37Z","lastTransitionTime":"2025-09-30T09:50:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:38 crc kubenswrapper[4730]: I0930 09:50:38.101216 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:38 crc kubenswrapper[4730]: I0930 09:50:38.101259 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:38 crc kubenswrapper[4730]: I0930 09:50:38.101271 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:38 crc kubenswrapper[4730]: I0930 09:50:38.101290 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:38 crc kubenswrapper[4730]: I0930 09:50:38.101302 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:38Z","lastTransitionTime":"2025-09-30T09:50:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:38 crc kubenswrapper[4730]: I0930 09:50:38.204118 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:38 crc kubenswrapper[4730]: I0930 09:50:38.204160 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:38 crc kubenswrapper[4730]: I0930 09:50:38.204171 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:38 crc kubenswrapper[4730]: I0930 09:50:38.204190 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:38 crc kubenswrapper[4730]: I0930 09:50:38.204204 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:38Z","lastTransitionTime":"2025-09-30T09:50:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:38 crc kubenswrapper[4730]: I0930 09:50:38.306577 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:38 crc kubenswrapper[4730]: I0930 09:50:38.308043 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:38 crc kubenswrapper[4730]: I0930 09:50:38.308332 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:38 crc kubenswrapper[4730]: I0930 09:50:38.308587 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:38 crc kubenswrapper[4730]: I0930 09:50:38.308867 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:38Z","lastTransitionTime":"2025-09-30T09:50:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:38 crc kubenswrapper[4730]: I0930 09:50:38.380499 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 09:50:38 crc kubenswrapper[4730]: I0930 09:50:38.380670 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 09:50:38 crc kubenswrapper[4730]: E0930 09:50:38.380833 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 09:50:38 crc kubenswrapper[4730]: E0930 09:50:38.381106 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 09:50:38 crc kubenswrapper[4730]: I0930 09:50:38.381255 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 09:50:38 crc kubenswrapper[4730]: E0930 09:50:38.381473 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 09:50:38 crc kubenswrapper[4730]: I0930 09:50:38.412532 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:38 crc kubenswrapper[4730]: I0930 09:50:38.412891 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:38 crc kubenswrapper[4730]: I0930 09:50:38.412999 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:38 crc kubenswrapper[4730]: I0930 09:50:38.413082 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:38 crc kubenswrapper[4730]: I0930 09:50:38.413161 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:38Z","lastTransitionTime":"2025-09-30T09:50:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:38 crc kubenswrapper[4730]: I0930 09:50:38.515592 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:38 crc kubenswrapper[4730]: I0930 09:50:38.515657 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:38 crc kubenswrapper[4730]: I0930 09:50:38.515669 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:38 crc kubenswrapper[4730]: I0930 09:50:38.515688 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:38 crc kubenswrapper[4730]: I0930 09:50:38.515700 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:38Z","lastTransitionTime":"2025-09-30T09:50:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:38 crc kubenswrapper[4730]: I0930 09:50:38.621446 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:38 crc kubenswrapper[4730]: I0930 09:50:38.621525 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:38 crc kubenswrapper[4730]: I0930 09:50:38.621545 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:38 crc kubenswrapper[4730]: I0930 09:50:38.621576 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:38 crc kubenswrapper[4730]: I0930 09:50:38.621591 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:38Z","lastTransitionTime":"2025-09-30T09:50:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:38 crc kubenswrapper[4730]: I0930 09:50:38.724223 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:38 crc kubenswrapper[4730]: I0930 09:50:38.724261 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:38 crc kubenswrapper[4730]: I0930 09:50:38.724273 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:38 crc kubenswrapper[4730]: I0930 09:50:38.724291 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:38 crc kubenswrapper[4730]: I0930 09:50:38.724301 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:38Z","lastTransitionTime":"2025-09-30T09:50:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:38 crc kubenswrapper[4730]: I0930 09:50:38.827224 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:38 crc kubenswrapper[4730]: I0930 09:50:38.827275 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:38 crc kubenswrapper[4730]: I0930 09:50:38.827288 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:38 crc kubenswrapper[4730]: I0930 09:50:38.827307 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:38 crc kubenswrapper[4730]: I0930 09:50:38.827322 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:38Z","lastTransitionTime":"2025-09-30T09:50:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:38 crc kubenswrapper[4730]: I0930 09:50:38.930508 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:38 crc kubenswrapper[4730]: I0930 09:50:38.930572 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:38 crc kubenswrapper[4730]: I0930 09:50:38.930585 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:38 crc kubenswrapper[4730]: I0930 09:50:38.930634 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:38 crc kubenswrapper[4730]: I0930 09:50:38.930649 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:38Z","lastTransitionTime":"2025-09-30T09:50:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:39 crc kubenswrapper[4730]: I0930 09:50:39.033840 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:39 crc kubenswrapper[4730]: I0930 09:50:39.033911 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:39 crc kubenswrapper[4730]: I0930 09:50:39.033930 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:39 crc kubenswrapper[4730]: I0930 09:50:39.033956 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:39 crc kubenswrapper[4730]: I0930 09:50:39.033974 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:39Z","lastTransitionTime":"2025-09-30T09:50:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:39 crc kubenswrapper[4730]: I0930 09:50:39.137277 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:39 crc kubenswrapper[4730]: I0930 09:50:39.137364 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:39 crc kubenswrapper[4730]: I0930 09:50:39.137376 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:39 crc kubenswrapper[4730]: I0930 09:50:39.137396 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:39 crc kubenswrapper[4730]: I0930 09:50:39.137409 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:39Z","lastTransitionTime":"2025-09-30T09:50:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:39 crc kubenswrapper[4730]: I0930 09:50:39.240425 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:39 crc kubenswrapper[4730]: I0930 09:50:39.240486 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:39 crc kubenswrapper[4730]: I0930 09:50:39.240500 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:39 crc kubenswrapper[4730]: I0930 09:50:39.240522 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:39 crc kubenswrapper[4730]: I0930 09:50:39.240536 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:39Z","lastTransitionTime":"2025-09-30T09:50:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:39 crc kubenswrapper[4730]: I0930 09:50:39.344381 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:39 crc kubenswrapper[4730]: I0930 09:50:39.344480 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:39 crc kubenswrapper[4730]: I0930 09:50:39.344519 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:39 crc kubenswrapper[4730]: I0930 09:50:39.344549 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:39 crc kubenswrapper[4730]: I0930 09:50:39.344569 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:39Z","lastTransitionTime":"2025-09-30T09:50:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:39 crc kubenswrapper[4730]: I0930 09:50:39.380925 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-dqqrb" Sep 30 09:50:39 crc kubenswrapper[4730]: E0930 09:50:39.381500 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-dqqrb" podUID="be86a67e-c663-4551-9ecf-a8c2a9801cd7" Sep 30 09:50:39 crc kubenswrapper[4730]: I0930 09:50:39.448354 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:39 crc kubenswrapper[4730]: I0930 09:50:39.448710 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:39 crc kubenswrapper[4730]: I0930 09:50:39.448829 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:39 crc kubenswrapper[4730]: I0930 09:50:39.448987 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:39 crc kubenswrapper[4730]: I0930 09:50:39.449120 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:39Z","lastTransitionTime":"2025-09-30T09:50:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:39 crc kubenswrapper[4730]: I0930 09:50:39.552762 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:39 crc kubenswrapper[4730]: I0930 09:50:39.552828 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:39 crc kubenswrapper[4730]: I0930 09:50:39.552848 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:39 crc kubenswrapper[4730]: I0930 09:50:39.552873 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:39 crc kubenswrapper[4730]: I0930 09:50:39.552890 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:39Z","lastTransitionTime":"2025-09-30T09:50:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:39 crc kubenswrapper[4730]: I0930 09:50:39.655917 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:39 crc kubenswrapper[4730]: I0930 09:50:39.655955 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:39 crc kubenswrapper[4730]: I0930 09:50:39.655964 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:39 crc kubenswrapper[4730]: I0930 09:50:39.655980 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:39 crc kubenswrapper[4730]: I0930 09:50:39.655991 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:39Z","lastTransitionTime":"2025-09-30T09:50:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:39 crc kubenswrapper[4730]: I0930 09:50:39.759140 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:39 crc kubenswrapper[4730]: I0930 09:50:39.759197 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:39 crc kubenswrapper[4730]: I0930 09:50:39.759210 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:39 crc kubenswrapper[4730]: I0930 09:50:39.759229 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:39 crc kubenswrapper[4730]: I0930 09:50:39.759268 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:39Z","lastTransitionTime":"2025-09-30T09:50:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:39 crc kubenswrapper[4730]: I0930 09:50:39.862335 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:39 crc kubenswrapper[4730]: I0930 09:50:39.862396 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:39 crc kubenswrapper[4730]: I0930 09:50:39.862405 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:39 crc kubenswrapper[4730]: I0930 09:50:39.862423 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:39 crc kubenswrapper[4730]: I0930 09:50:39.862434 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:39Z","lastTransitionTime":"2025-09-30T09:50:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:39 crc kubenswrapper[4730]: I0930 09:50:39.965782 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:39 crc kubenswrapper[4730]: I0930 09:50:39.965830 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:39 crc kubenswrapper[4730]: I0930 09:50:39.965843 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:39 crc kubenswrapper[4730]: I0930 09:50:39.965865 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:39 crc kubenswrapper[4730]: I0930 09:50:39.965879 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:39Z","lastTransitionTime":"2025-09-30T09:50:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:40 crc kubenswrapper[4730]: I0930 09:50:40.068738 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:40 crc kubenswrapper[4730]: I0930 09:50:40.068796 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:40 crc kubenswrapper[4730]: I0930 09:50:40.068808 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:40 crc kubenswrapper[4730]: I0930 09:50:40.068828 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:40 crc kubenswrapper[4730]: I0930 09:50:40.068841 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:40Z","lastTransitionTime":"2025-09-30T09:50:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:40 crc kubenswrapper[4730]: I0930 09:50:40.172227 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:40 crc kubenswrapper[4730]: I0930 09:50:40.172273 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:40 crc kubenswrapper[4730]: I0930 09:50:40.172288 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:40 crc kubenswrapper[4730]: I0930 09:50:40.172307 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:40 crc kubenswrapper[4730]: I0930 09:50:40.172319 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:40Z","lastTransitionTime":"2025-09-30T09:50:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:40 crc kubenswrapper[4730]: I0930 09:50:40.210795 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 09:50:40 crc kubenswrapper[4730]: I0930 09:50:40.210904 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 09:50:40 crc kubenswrapper[4730]: I0930 09:50:40.210978 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 09:50:40 crc kubenswrapper[4730]: E0930 09:50:40.211109 4730 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Sep 30 09:50:40 crc kubenswrapper[4730]: E0930 09:50:40.211185 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-30 09:51:44.21116627 +0000 UTC m=+148.544426263 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Sep 30 09:50:40 crc kubenswrapper[4730]: E0930 09:50:40.211280 4730 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 30 09:50:40 crc kubenswrapper[4730]: E0930 09:50:40.211418 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-30 09:51:44.211380997 +0000 UTC m=+148.544641030 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 30 09:50:40 crc kubenswrapper[4730]: E0930 09:50:40.211961 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-09-30 09:51:44.211915361 +0000 UTC m=+148.545175354 (durationBeforeRetry 1m4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 09:50:40 crc kubenswrapper[4730]: I0930 09:50:40.293022 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:40 crc kubenswrapper[4730]: I0930 09:50:40.293080 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:40 crc kubenswrapper[4730]: I0930 09:50:40.293097 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:40 crc kubenswrapper[4730]: I0930 09:50:40.293118 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:40 crc kubenswrapper[4730]: I0930 09:50:40.293134 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:40Z","lastTransitionTime":"2025-09-30T09:50:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:40 crc kubenswrapper[4730]: I0930 09:50:40.312381 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 09:50:40 crc kubenswrapper[4730]: I0930 09:50:40.312445 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 09:50:40 crc kubenswrapper[4730]: E0930 09:50:40.312576 4730 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 30 09:50:40 crc kubenswrapper[4730]: E0930 09:50:40.312635 4730 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 30 09:50:40 crc kubenswrapper[4730]: E0930 09:50:40.312646 4730 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 09:50:40 crc kubenswrapper[4730]: E0930 09:50:40.312659 4730 projected.go:288] Couldn't get 
configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 30 09:50:40 crc kubenswrapper[4730]: E0930 09:50:40.312687 4730 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 30 09:50:40 crc kubenswrapper[4730]: E0930 09:50:40.312701 4730 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 09:50:40 crc kubenswrapper[4730]: E0930 09:50:40.312708 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-09-30 09:51:44.312688786 +0000 UTC m=+148.645948779 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 09:50:40 crc kubenswrapper[4730]: E0930 09:50:40.312762 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-09-30 09:51:44.312739068 +0000 UTC m=+148.645999241 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 09:50:40 crc kubenswrapper[4730]: I0930 09:50:40.380466 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 09:50:40 crc kubenswrapper[4730]: I0930 09:50:40.380529 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 09:50:40 crc kubenswrapper[4730]: I0930 09:50:40.380500 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 09:50:40 crc kubenswrapper[4730]: E0930 09:50:40.380694 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 09:50:40 crc kubenswrapper[4730]: E0930 09:50:40.380824 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 09:50:40 crc kubenswrapper[4730]: E0930 09:50:40.380996 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 09:50:40 crc kubenswrapper[4730]: I0930 09:50:40.396042 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:40 crc kubenswrapper[4730]: I0930 09:50:40.396081 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:40 crc kubenswrapper[4730]: I0930 09:50:40.396090 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:40 crc kubenswrapper[4730]: I0930 09:50:40.396106 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:40 crc kubenswrapper[4730]: I0930 09:50:40.396116 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:40Z","lastTransitionTime":"2025-09-30T09:50:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:40 crc kubenswrapper[4730]: I0930 09:50:40.499649 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:40 crc kubenswrapper[4730]: I0930 09:50:40.499719 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:40 crc kubenswrapper[4730]: I0930 09:50:40.499734 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:40 crc kubenswrapper[4730]: I0930 09:50:40.499764 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:40 crc kubenswrapper[4730]: I0930 09:50:40.499790 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:40Z","lastTransitionTime":"2025-09-30T09:50:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:40 crc kubenswrapper[4730]: I0930 09:50:40.602857 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:40 crc kubenswrapper[4730]: I0930 09:50:40.602923 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:40 crc kubenswrapper[4730]: I0930 09:50:40.602936 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:40 crc kubenswrapper[4730]: I0930 09:50:40.602960 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:40 crc kubenswrapper[4730]: I0930 09:50:40.602981 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:40Z","lastTransitionTime":"2025-09-30T09:50:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:40 crc kubenswrapper[4730]: I0930 09:50:40.706056 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:40 crc kubenswrapper[4730]: I0930 09:50:40.706111 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:40 crc kubenswrapper[4730]: I0930 09:50:40.706120 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:40 crc kubenswrapper[4730]: I0930 09:50:40.706135 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:40 crc kubenswrapper[4730]: I0930 09:50:40.706144 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:40Z","lastTransitionTime":"2025-09-30T09:50:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:40 crc kubenswrapper[4730]: I0930 09:50:40.809025 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:40 crc kubenswrapper[4730]: I0930 09:50:40.809084 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:40 crc kubenswrapper[4730]: I0930 09:50:40.809095 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:40 crc kubenswrapper[4730]: I0930 09:50:40.809115 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:40 crc kubenswrapper[4730]: I0930 09:50:40.809128 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:40Z","lastTransitionTime":"2025-09-30T09:50:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:40 crc kubenswrapper[4730]: I0930 09:50:40.912841 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:40 crc kubenswrapper[4730]: I0930 09:50:40.912894 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:40 crc kubenswrapper[4730]: I0930 09:50:40.912907 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:40 crc kubenswrapper[4730]: I0930 09:50:40.912923 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:40 crc kubenswrapper[4730]: I0930 09:50:40.912934 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:40Z","lastTransitionTime":"2025-09-30T09:50:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:41 crc kubenswrapper[4730]: I0930 09:50:41.016210 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:41 crc kubenswrapper[4730]: I0930 09:50:41.016555 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:41 crc kubenswrapper[4730]: I0930 09:50:41.016653 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:41 crc kubenswrapper[4730]: I0930 09:50:41.016757 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:41 crc kubenswrapper[4730]: I0930 09:50:41.016907 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:41Z","lastTransitionTime":"2025-09-30T09:50:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:41 crc kubenswrapper[4730]: I0930 09:50:41.119885 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:41 crc kubenswrapper[4730]: I0930 09:50:41.120394 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:41 crc kubenswrapper[4730]: I0930 09:50:41.120572 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:41 crc kubenswrapper[4730]: I0930 09:50:41.120852 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:41 crc kubenswrapper[4730]: I0930 09:50:41.121127 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:41Z","lastTransitionTime":"2025-09-30T09:50:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:41 crc kubenswrapper[4730]: I0930 09:50:41.224750 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:41 crc kubenswrapper[4730]: I0930 09:50:41.224803 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:41 crc kubenswrapper[4730]: I0930 09:50:41.224815 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:41 crc kubenswrapper[4730]: I0930 09:50:41.224835 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:41 crc kubenswrapper[4730]: I0930 09:50:41.224849 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:41Z","lastTransitionTime":"2025-09-30T09:50:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:41 crc kubenswrapper[4730]: I0930 09:50:41.328207 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:41 crc kubenswrapper[4730]: I0930 09:50:41.328279 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:41 crc kubenswrapper[4730]: I0930 09:50:41.328294 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:41 crc kubenswrapper[4730]: I0930 09:50:41.328318 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:41 crc kubenswrapper[4730]: I0930 09:50:41.328334 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:41Z","lastTransitionTime":"2025-09-30T09:50:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:41 crc kubenswrapper[4730]: I0930 09:50:41.380849 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-dqqrb" Sep 30 09:50:41 crc kubenswrapper[4730]: E0930 09:50:41.381038 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-dqqrb" podUID="be86a67e-c663-4551-9ecf-a8c2a9801cd7" Sep 30 09:50:41 crc kubenswrapper[4730]: I0930 09:50:41.431117 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:41 crc kubenswrapper[4730]: I0930 09:50:41.431186 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:41 crc kubenswrapper[4730]: I0930 09:50:41.431206 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:41 crc kubenswrapper[4730]: I0930 09:50:41.431231 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:41 crc kubenswrapper[4730]: I0930 09:50:41.431249 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:41Z","lastTransitionTime":"2025-09-30T09:50:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:41 crc kubenswrapper[4730]: I0930 09:50:41.534222 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:41 crc kubenswrapper[4730]: I0930 09:50:41.534590 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:41 crc kubenswrapper[4730]: I0930 09:50:41.534752 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:41 crc kubenswrapper[4730]: I0930 09:50:41.534844 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:41 crc kubenswrapper[4730]: I0930 09:50:41.534922 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:41Z","lastTransitionTime":"2025-09-30T09:50:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:41 crc kubenswrapper[4730]: I0930 09:50:41.638744 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:41 crc kubenswrapper[4730]: I0930 09:50:41.638785 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:41 crc kubenswrapper[4730]: I0930 09:50:41.638798 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:41 crc kubenswrapper[4730]: I0930 09:50:41.638827 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:41 crc kubenswrapper[4730]: I0930 09:50:41.638839 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:41Z","lastTransitionTime":"2025-09-30T09:50:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:41 crc kubenswrapper[4730]: I0930 09:50:41.742669 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:41 crc kubenswrapper[4730]: I0930 09:50:41.743198 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:41 crc kubenswrapper[4730]: I0930 09:50:41.743354 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:41 crc kubenswrapper[4730]: I0930 09:50:41.743524 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:41 crc kubenswrapper[4730]: I0930 09:50:41.743806 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:41Z","lastTransitionTime":"2025-09-30T09:50:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:41 crc kubenswrapper[4730]: I0930 09:50:41.847441 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:41 crc kubenswrapper[4730]: I0930 09:50:41.847855 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:41 crc kubenswrapper[4730]: I0930 09:50:41.847938 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:41 crc kubenswrapper[4730]: I0930 09:50:41.848039 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:41 crc kubenswrapper[4730]: I0930 09:50:41.848118 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:41Z","lastTransitionTime":"2025-09-30T09:50:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:41 crc kubenswrapper[4730]: I0930 09:50:41.951204 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:41 crc kubenswrapper[4730]: I0930 09:50:41.951270 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:41 crc kubenswrapper[4730]: I0930 09:50:41.951283 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:41 crc kubenswrapper[4730]: I0930 09:50:41.951303 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:41 crc kubenswrapper[4730]: I0930 09:50:41.951314 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:41Z","lastTransitionTime":"2025-09-30T09:50:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:42 crc kubenswrapper[4730]: I0930 09:50:42.054327 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:42 crc kubenswrapper[4730]: I0930 09:50:42.054398 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:42 crc kubenswrapper[4730]: I0930 09:50:42.054414 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:42 crc kubenswrapper[4730]: I0930 09:50:42.054440 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:42 crc kubenswrapper[4730]: I0930 09:50:42.054455 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:42Z","lastTransitionTime":"2025-09-30T09:50:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:42 crc kubenswrapper[4730]: I0930 09:50:42.158001 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:42 crc kubenswrapper[4730]: I0930 09:50:42.158048 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:42 crc kubenswrapper[4730]: I0930 09:50:42.158060 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:42 crc kubenswrapper[4730]: I0930 09:50:42.158080 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:42 crc kubenswrapper[4730]: I0930 09:50:42.158093 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:42Z","lastTransitionTime":"2025-09-30T09:50:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:42 crc kubenswrapper[4730]: I0930 09:50:42.262169 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:42 crc kubenswrapper[4730]: I0930 09:50:42.262244 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:42 crc kubenswrapper[4730]: I0930 09:50:42.262266 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:42 crc kubenswrapper[4730]: I0930 09:50:42.262294 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:42 crc kubenswrapper[4730]: I0930 09:50:42.262312 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:42Z","lastTransitionTime":"2025-09-30T09:50:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:42 crc kubenswrapper[4730]: I0930 09:50:42.365499 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:42 crc kubenswrapper[4730]: I0930 09:50:42.365574 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:42 crc kubenswrapper[4730]: I0930 09:50:42.365588 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:42 crc kubenswrapper[4730]: I0930 09:50:42.365650 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:42 crc kubenswrapper[4730]: I0930 09:50:42.365668 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:42Z","lastTransitionTime":"2025-09-30T09:50:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:42 crc kubenswrapper[4730]: I0930 09:50:42.379907 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 09:50:42 crc kubenswrapper[4730]: I0930 09:50:42.379941 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 09:50:42 crc kubenswrapper[4730]: E0930 09:50:42.380396 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 09:50:42 crc kubenswrapper[4730]: E0930 09:50:42.380533 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 09:50:42 crc kubenswrapper[4730]: I0930 09:50:42.380099 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 09:50:42 crc kubenswrapper[4730]: E0930 09:50:42.380800 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 09:50:42 crc kubenswrapper[4730]: I0930 09:50:42.468828 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:42 crc kubenswrapper[4730]: I0930 09:50:42.468919 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:42 crc kubenswrapper[4730]: I0930 09:50:42.468945 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:42 crc kubenswrapper[4730]: I0930 09:50:42.468988 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:42 crc kubenswrapper[4730]: I0930 09:50:42.469014 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:42Z","lastTransitionTime":"2025-09-30T09:50:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:42 crc kubenswrapper[4730]: I0930 09:50:42.572894 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:42 crc kubenswrapper[4730]: I0930 09:50:42.572952 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:42 crc kubenswrapper[4730]: I0930 09:50:42.572966 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:42 crc kubenswrapper[4730]: I0930 09:50:42.572988 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:42 crc kubenswrapper[4730]: I0930 09:50:42.573000 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:42Z","lastTransitionTime":"2025-09-30T09:50:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:42 crc kubenswrapper[4730]: I0930 09:50:42.676990 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:42 crc kubenswrapper[4730]: I0930 09:50:42.677556 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:42 crc kubenswrapper[4730]: I0930 09:50:42.677573 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:42 crc kubenswrapper[4730]: I0930 09:50:42.677595 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:42 crc kubenswrapper[4730]: I0930 09:50:42.677634 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:42Z","lastTransitionTime":"2025-09-30T09:50:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:42 crc kubenswrapper[4730]: I0930 09:50:42.780825 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:42 crc kubenswrapper[4730]: I0930 09:50:42.780897 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:42 crc kubenswrapper[4730]: I0930 09:50:42.780918 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:42 crc kubenswrapper[4730]: I0930 09:50:42.780941 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:42 crc kubenswrapper[4730]: I0930 09:50:42.780956 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:42Z","lastTransitionTime":"2025-09-30T09:50:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:42 crc kubenswrapper[4730]: I0930 09:50:42.884466 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:42 crc kubenswrapper[4730]: I0930 09:50:42.884522 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:42 crc kubenswrapper[4730]: I0930 09:50:42.884536 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:42 crc kubenswrapper[4730]: I0930 09:50:42.884561 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:42 crc kubenswrapper[4730]: I0930 09:50:42.884575 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:42Z","lastTransitionTime":"2025-09-30T09:50:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:42 crc kubenswrapper[4730]: I0930 09:50:42.987698 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:42 crc kubenswrapper[4730]: I0930 09:50:42.987755 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:42 crc kubenswrapper[4730]: I0930 09:50:42.987766 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:42 crc kubenswrapper[4730]: I0930 09:50:42.987784 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:42 crc kubenswrapper[4730]: I0930 09:50:42.987795 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:42Z","lastTransitionTime":"2025-09-30T09:50:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:43 crc kubenswrapper[4730]: I0930 09:50:43.090971 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:43 crc kubenswrapper[4730]: I0930 09:50:43.091042 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:43 crc kubenswrapper[4730]: I0930 09:50:43.091065 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:43 crc kubenswrapper[4730]: I0930 09:50:43.091098 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:43 crc kubenswrapper[4730]: I0930 09:50:43.091124 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:43Z","lastTransitionTime":"2025-09-30T09:50:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:43 crc kubenswrapper[4730]: I0930 09:50:43.194211 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:43 crc kubenswrapper[4730]: I0930 09:50:43.194276 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:43 crc kubenswrapper[4730]: I0930 09:50:43.194291 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:43 crc kubenswrapper[4730]: I0930 09:50:43.194312 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:43 crc kubenswrapper[4730]: I0930 09:50:43.194330 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:43Z","lastTransitionTime":"2025-09-30T09:50:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:43 crc kubenswrapper[4730]: I0930 09:50:43.296897 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:43 crc kubenswrapper[4730]: I0930 09:50:43.296985 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:43 crc kubenswrapper[4730]: I0930 09:50:43.297017 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:43 crc kubenswrapper[4730]: I0930 09:50:43.297050 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:43 crc kubenswrapper[4730]: I0930 09:50:43.297073 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:43Z","lastTransitionTime":"2025-09-30T09:50:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:43 crc kubenswrapper[4730]: I0930 09:50:43.380049 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-dqqrb" Sep 30 09:50:43 crc kubenswrapper[4730]: E0930 09:50:43.380237 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-dqqrb" podUID="be86a67e-c663-4551-9ecf-a8c2a9801cd7" Sep 30 09:50:43 crc kubenswrapper[4730]: I0930 09:50:43.399322 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:43 crc kubenswrapper[4730]: I0930 09:50:43.399387 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:43 crc kubenswrapper[4730]: I0930 09:50:43.399401 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:43 crc kubenswrapper[4730]: I0930 09:50:43.399423 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:43 crc kubenswrapper[4730]: I0930 09:50:43.399439 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:43Z","lastTransitionTime":"2025-09-30T09:50:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:43 crc kubenswrapper[4730]: I0930 09:50:43.502457 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:43 crc kubenswrapper[4730]: I0930 09:50:43.502522 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:43 crc kubenswrapper[4730]: I0930 09:50:43.502536 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:43 crc kubenswrapper[4730]: I0930 09:50:43.502559 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:43 crc kubenswrapper[4730]: I0930 09:50:43.502576 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:43Z","lastTransitionTime":"2025-09-30T09:50:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:43 crc kubenswrapper[4730]: I0930 09:50:43.605203 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:43 crc kubenswrapper[4730]: I0930 09:50:43.605245 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:43 crc kubenswrapper[4730]: I0930 09:50:43.605256 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:43 crc kubenswrapper[4730]: I0930 09:50:43.605274 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:43 crc kubenswrapper[4730]: I0930 09:50:43.605284 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:43Z","lastTransitionTime":"2025-09-30T09:50:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:43 crc kubenswrapper[4730]: I0930 09:50:43.709085 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:43 crc kubenswrapper[4730]: I0930 09:50:43.709134 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:43 crc kubenswrapper[4730]: I0930 09:50:43.709147 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:43 crc kubenswrapper[4730]: I0930 09:50:43.709167 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:43 crc kubenswrapper[4730]: I0930 09:50:43.709177 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:43Z","lastTransitionTime":"2025-09-30T09:50:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:43 crc kubenswrapper[4730]: I0930 09:50:43.812293 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:43 crc kubenswrapper[4730]: I0930 09:50:43.812381 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:43 crc kubenswrapper[4730]: I0930 09:50:43.812416 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:43 crc kubenswrapper[4730]: I0930 09:50:43.812441 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:43 crc kubenswrapper[4730]: I0930 09:50:43.812455 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:43Z","lastTransitionTime":"2025-09-30T09:50:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:43 crc kubenswrapper[4730]: I0930 09:50:43.916056 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:43 crc kubenswrapper[4730]: I0930 09:50:43.916138 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:43 crc kubenswrapper[4730]: I0930 09:50:43.916157 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:43 crc kubenswrapper[4730]: I0930 09:50:43.916186 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:43 crc kubenswrapper[4730]: I0930 09:50:43.916206 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:43Z","lastTransitionTime":"2025-09-30T09:50:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:44 crc kubenswrapper[4730]: I0930 09:50:44.019063 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:44 crc kubenswrapper[4730]: I0930 09:50:44.019112 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:44 crc kubenswrapper[4730]: I0930 09:50:44.019122 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:44 crc kubenswrapper[4730]: I0930 09:50:44.019144 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:44 crc kubenswrapper[4730]: I0930 09:50:44.019157 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:44Z","lastTransitionTime":"2025-09-30T09:50:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:44 crc kubenswrapper[4730]: I0930 09:50:44.122763 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:44 crc kubenswrapper[4730]: I0930 09:50:44.122824 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:44 crc kubenswrapper[4730]: I0930 09:50:44.122838 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:44 crc kubenswrapper[4730]: I0930 09:50:44.122861 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:44 crc kubenswrapper[4730]: I0930 09:50:44.122874 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:44Z","lastTransitionTime":"2025-09-30T09:50:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:44 crc kubenswrapper[4730]: I0930 09:50:44.225732 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:44 crc kubenswrapper[4730]: I0930 09:50:44.225800 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:44 crc kubenswrapper[4730]: I0930 09:50:44.225815 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:44 crc kubenswrapper[4730]: I0930 09:50:44.225833 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:44 crc kubenswrapper[4730]: I0930 09:50:44.225845 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:44Z","lastTransitionTime":"2025-09-30T09:50:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:44 crc kubenswrapper[4730]: I0930 09:50:44.251822 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:44 crc kubenswrapper[4730]: I0930 09:50:44.251893 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:44 crc kubenswrapper[4730]: I0930 09:50:44.251917 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:44 crc kubenswrapper[4730]: I0930 09:50:44.251947 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:44 crc kubenswrapper[4730]: I0930 09:50:44.251970 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:44Z","lastTransitionTime":"2025-09-30T09:50:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
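
Everything in this stretch of the log keys off a single condition: the runtime reports NetworkReady=false because no CNI config has been written to /etc/kubernetes/cni/net.d/ yet (on OpenShift that is done by the network operator and multus once their pods come up). The kubelet's readiness check effectively looks for a network config file in that directory; a standalone sketch of the same probe (not part of the log, and simplified to a bare file-existence test):

    // cnicheck.go: diagnostic sketch, not part of the captured log.
    // Mirrors the readiness probe that keeps failing above: the kubelet
    // reports NetworkReady=false until the network provider writes at least
    // one config file into /etc/kubernetes/cni/net.d/.
    package main

    import (
        "fmt"
        "os"
        "path/filepath"
    )

    func main() {
        // The real kubelet check is more selective (*.conf, *.conflist, *.json);
        // a bare glob is enough for a quick look.
        matches, err := filepath.Glob("/etc/kubernetes/cni/net.d/*")
        if err != nil {
            panic(err)
        }
        if len(matches) == 0 {
            fmt.Println("no CNI configuration file found; network provider has not started")
            os.Exit(1)
        }
        for _, m := range matches {
            fmt.Println("found CNI config:", m)
        }
    }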
Sep 30 09:50:44 crc kubenswrapper[4730]: E0930 09:50:44.274199 4730 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:50:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:50:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:44Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:50:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:50:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:44Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"106859cf-ef10-430a-91cd-145c67df2de1\\\",\\\"systemUUID\\\":\\\"07b44d08-082f-49ea-b265-a8fb7a484875\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:44Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:44 crc kubenswrapper[4730]: I0930 09:50:44.279705 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:44 crc kubenswrapper[4730]: I0930 09:50:44.279756 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 09:50:44 crc kubenswrapper[4730]: I0930 09:50:44.279774 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:44 crc kubenswrapper[4730]: I0930 09:50:44.279800 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:44 crc kubenswrapper[4730]: I0930 09:50:44.279818 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:44Z","lastTransitionTime":"2025-09-30T09:50:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:44 crc kubenswrapper[4730]: E0930 09:50:44.301709 4730 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:50:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:50:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:44Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:50:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:50:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:44Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"106859cf-ef10-430a-91cd-145c67df2de1\\\",\\\"systemUUID\\\":\\\"07b44d08-082f-49ea-b265-a8fb7a484875\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:44Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:44 crc kubenswrapper[4730]: I0930 09:50:44.307865 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:44 crc kubenswrapper[4730]: I0930 09:50:44.307936 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 09:50:44 crc kubenswrapper[4730]: I0930 09:50:44.307957 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:44 crc kubenswrapper[4730]: I0930 09:50:44.307982 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:44 crc kubenswrapper[4730]: I0930 09:50:44.307999 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:44Z","lastTransitionTime":"2025-09-30T09:50:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:44 crc kubenswrapper[4730]: E0930 09:50:44.328718 4730 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:50:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:50:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:44Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:50:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:50:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:44Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"106859cf-ef10-430a-91cd-145c67df2de1\\\",\\\"systemUUID\\\":\\\"07b44d08-082f-49ea-b265-a8fb7a484875\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:44Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:44 crc kubenswrapper[4730]: I0930 09:50:44.334800 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:44 crc kubenswrapper[4730]: I0930 09:50:44.334858 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 09:50:44 crc kubenswrapper[4730]: I0930 09:50:44.334872 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:44 crc kubenswrapper[4730]: I0930 09:50:44.334899 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:44 crc kubenswrapper[4730]: I0930 09:50:44.334914 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:44Z","lastTransitionTime":"2025-09-30T09:50:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:44 crc kubenswrapper[4730]: E0930 09:50:44.352634 4730 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:50:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:50:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:44Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:50:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:50:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:44Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"106859cf-ef10-430a-91cd-145c67df2de1\\\",\\\"systemUUID\\\":\\\"07b44d08-082f-49ea-b265-a8fb7a484875\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:44Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:44 crc kubenswrapper[4730]: I0930 09:50:44.356509 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:44 crc kubenswrapper[4730]: I0930 09:50:44.356558 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 09:50:44 crc kubenswrapper[4730]: I0930 09:50:44.356575 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:44 crc kubenswrapper[4730]: I0930 09:50:44.356597 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:44 crc kubenswrapper[4730]: I0930 09:50:44.356634 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:44Z","lastTransitionTime":"2025-09-30T09:50:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:44 crc kubenswrapper[4730]: E0930 09:50:44.370982 4730 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:50:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:50:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:44Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:50:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T09:50:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:44Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"106859cf-ef10-430a-91cd-145c67df2de1\\\",\\\"systemUUID\\\":\\\"07b44d08-082f-49ea-b265-a8fb7a484875\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:44Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:44 crc kubenswrapper[4730]: E0930 09:50:44.371178 4730 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Sep 30 09:50:44 crc kubenswrapper[4730]: I0930 09:50:44.373940 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Sep 30 09:50:44 crc kubenswrapper[4730]: I0930 09:50:44.373985 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:44 crc kubenswrapper[4730]: I0930 09:50:44.374000 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:44 crc kubenswrapper[4730]: I0930 09:50:44.374223 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:44 crc kubenswrapper[4730]: I0930 09:50:44.374239 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:44Z","lastTransitionTime":"2025-09-30T09:50:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:44 crc kubenswrapper[4730]: I0930 09:50:44.380902 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 09:50:44 crc kubenswrapper[4730]: I0930 09:50:44.380992 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 09:50:44 crc kubenswrapper[4730]: I0930 09:50:44.381022 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 09:50:44 crc kubenswrapper[4730]: E0930 09:50:44.381141 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 09:50:44 crc kubenswrapper[4730]: E0930 09:50:44.381229 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 09:50:44 crc kubenswrapper[4730]: E0930 09:50:44.381295 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 09:50:44 crc kubenswrapper[4730]: I0930 09:50:44.477463 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:44 crc kubenswrapper[4730]: I0930 09:50:44.477539 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:44 crc kubenswrapper[4730]: I0930 09:50:44.477548 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:44 crc kubenswrapper[4730]: I0930 09:50:44.477568 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:44 crc kubenswrapper[4730]: I0930 09:50:44.477578 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:44Z","lastTransitionTime":"2025-09-30T09:50:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:44 crc kubenswrapper[4730]: I0930 09:50:44.580532 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:44 crc kubenswrapper[4730]: I0930 09:50:44.580576 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:44 crc kubenswrapper[4730]: I0930 09:50:44.580586 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:44 crc kubenswrapper[4730]: I0930 09:50:44.580603 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:44 crc kubenswrapper[4730]: I0930 09:50:44.580634 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:44Z","lastTransitionTime":"2025-09-30T09:50:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:44 crc kubenswrapper[4730]: I0930 09:50:44.683557 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:44 crc kubenswrapper[4730]: I0930 09:50:44.683646 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:44 crc kubenswrapper[4730]: I0930 09:50:44.683664 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:44 crc kubenswrapper[4730]: I0930 09:50:44.683689 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:44 crc kubenswrapper[4730]: I0930 09:50:44.683734 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:44Z","lastTransitionTime":"2025-09-30T09:50:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:44 crc kubenswrapper[4730]: I0930 09:50:44.787020 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:44 crc kubenswrapper[4730]: I0930 09:50:44.787066 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:44 crc kubenswrapper[4730]: I0930 09:50:44.787078 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:44 crc kubenswrapper[4730]: I0930 09:50:44.787095 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:44 crc kubenswrapper[4730]: I0930 09:50:44.787107 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:44Z","lastTransitionTime":"2025-09-30T09:50:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:44 crc kubenswrapper[4730]: I0930 09:50:44.890175 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:44 crc kubenswrapper[4730]: I0930 09:50:44.890223 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:44 crc kubenswrapper[4730]: I0930 09:50:44.890236 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:44 crc kubenswrapper[4730]: I0930 09:50:44.890255 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:44 crc kubenswrapper[4730]: I0930 09:50:44.890270 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:44Z","lastTransitionTime":"2025-09-30T09:50:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:44 crc kubenswrapper[4730]: I0930 09:50:44.993253 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:44 crc kubenswrapper[4730]: I0930 09:50:44.993318 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:44 crc kubenswrapper[4730]: I0930 09:50:44.993331 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:44 crc kubenswrapper[4730]: I0930 09:50:44.993352 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:44 crc kubenswrapper[4730]: I0930 09:50:44.993365 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:44Z","lastTransitionTime":"2025-09-30T09:50:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:45 crc kubenswrapper[4730]: I0930 09:50:45.096777 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:45 crc kubenswrapper[4730]: I0930 09:50:45.096852 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:45 crc kubenswrapper[4730]: I0930 09:50:45.096877 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:45 crc kubenswrapper[4730]: I0930 09:50:45.096951 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:45 crc kubenswrapper[4730]: I0930 09:50:45.096981 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:45Z","lastTransitionTime":"2025-09-30T09:50:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:45 crc kubenswrapper[4730]: I0930 09:50:45.199767 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:45 crc kubenswrapper[4730]: I0930 09:50:45.199827 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:45 crc kubenswrapper[4730]: I0930 09:50:45.199846 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:45 crc kubenswrapper[4730]: I0930 09:50:45.199871 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:45 crc kubenswrapper[4730]: I0930 09:50:45.199887 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:45Z","lastTransitionTime":"2025-09-30T09:50:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:45 crc kubenswrapper[4730]: I0930 09:50:45.303106 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:45 crc kubenswrapper[4730]: I0930 09:50:45.303177 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:45 crc kubenswrapper[4730]: I0930 09:50:45.303191 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:45 crc kubenswrapper[4730]: I0930 09:50:45.303209 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:45 crc kubenswrapper[4730]: I0930 09:50:45.303221 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:45Z","lastTransitionTime":"2025-09-30T09:50:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:45 crc kubenswrapper[4730]: I0930 09:50:45.380509 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-dqqrb" Sep 30 09:50:45 crc kubenswrapper[4730]: E0930 09:50:45.380804 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-dqqrb" podUID="be86a67e-c663-4551-9ecf-a8c2a9801cd7" Sep 30 09:50:45 crc kubenswrapper[4730]: I0930 09:50:45.406006 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:45 crc kubenswrapper[4730]: I0930 09:50:45.406064 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:45 crc kubenswrapper[4730]: I0930 09:50:45.406077 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:45 crc kubenswrapper[4730]: I0930 09:50:45.406099 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:45 crc kubenswrapper[4730]: I0930 09:50:45.406117 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:45Z","lastTransitionTime":"2025-09-30T09:50:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:45 crc kubenswrapper[4730]: I0930 09:50:45.508568 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:45 crc kubenswrapper[4730]: I0930 09:50:45.508604 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:45 crc kubenswrapper[4730]: I0930 09:50:45.508634 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:45 crc kubenswrapper[4730]: I0930 09:50:45.508650 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:45 crc kubenswrapper[4730]: I0930 09:50:45.508661 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:45Z","lastTransitionTime":"2025-09-30T09:50:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:45 crc kubenswrapper[4730]: I0930 09:50:45.612092 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:45 crc kubenswrapper[4730]: I0930 09:50:45.612156 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:45 crc kubenswrapper[4730]: I0930 09:50:45.612167 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:45 crc kubenswrapper[4730]: I0930 09:50:45.612186 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:45 crc kubenswrapper[4730]: I0930 09:50:45.612197 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:45Z","lastTransitionTime":"2025-09-30T09:50:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:45 crc kubenswrapper[4730]: I0930 09:50:45.715799 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:45 crc kubenswrapper[4730]: I0930 09:50:45.715892 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:45 crc kubenswrapper[4730]: I0930 09:50:45.715917 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:45 crc kubenswrapper[4730]: I0930 09:50:45.715952 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:45 crc kubenswrapper[4730]: I0930 09:50:45.715977 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:45Z","lastTransitionTime":"2025-09-30T09:50:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:45 crc kubenswrapper[4730]: I0930 09:50:45.818880 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:45 crc kubenswrapper[4730]: I0930 09:50:45.818949 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:45 crc kubenswrapper[4730]: I0930 09:50:45.818966 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:45 crc kubenswrapper[4730]: I0930 09:50:45.818993 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:45 crc kubenswrapper[4730]: I0930 09:50:45.819020 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:45Z","lastTransitionTime":"2025-09-30T09:50:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:45 crc kubenswrapper[4730]: I0930 09:50:45.923148 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:45 crc kubenswrapper[4730]: I0930 09:50:45.923197 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:45 crc kubenswrapper[4730]: I0930 09:50:45.923207 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:45 crc kubenswrapper[4730]: I0930 09:50:45.923226 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:45 crc kubenswrapper[4730]: I0930 09:50:45.923237 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:45Z","lastTransitionTime":"2025-09-30T09:50:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:46 crc kubenswrapper[4730]: I0930 09:50:46.025537 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:46 crc kubenswrapper[4730]: I0930 09:50:46.025638 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:46 crc kubenswrapper[4730]: I0930 09:50:46.025666 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:46 crc kubenswrapper[4730]: I0930 09:50:46.025698 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:46 crc kubenswrapper[4730]: I0930 09:50:46.025718 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:46Z","lastTransitionTime":"2025-09-30T09:50:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:46 crc kubenswrapper[4730]: I0930 09:50:46.128167 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:46 crc kubenswrapper[4730]: I0930 09:50:46.128209 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:46 crc kubenswrapper[4730]: I0930 09:50:46.128220 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:46 crc kubenswrapper[4730]: I0930 09:50:46.128236 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:46 crc kubenswrapper[4730]: I0930 09:50:46.128249 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:46Z","lastTransitionTime":"2025-09-30T09:50:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:46 crc kubenswrapper[4730]: I0930 09:50:46.230748 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:46 crc kubenswrapper[4730]: I0930 09:50:46.230810 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:46 crc kubenswrapper[4730]: I0930 09:50:46.230830 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:46 crc kubenswrapper[4730]: I0930 09:50:46.230854 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:46 crc kubenswrapper[4730]: I0930 09:50:46.230870 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:46Z","lastTransitionTime":"2025-09-30T09:50:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:46 crc kubenswrapper[4730]: I0930 09:50:46.334394 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:46 crc kubenswrapper[4730]: I0930 09:50:46.334436 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:46 crc kubenswrapper[4730]: I0930 09:50:46.334450 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:46 crc kubenswrapper[4730]: I0930 09:50:46.334470 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:46 crc kubenswrapper[4730]: I0930 09:50:46.334484 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:46Z","lastTransitionTime":"2025-09-30T09:50:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:46 crc kubenswrapper[4730]: I0930 09:50:46.380695 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 09:50:46 crc kubenswrapper[4730]: I0930 09:50:46.380812 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 09:50:46 crc kubenswrapper[4730]: I0930 09:50:46.380716 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 09:50:46 crc kubenswrapper[4730]: E0930 09:50:46.380977 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 09:50:46 crc kubenswrapper[4730]: E0930 09:50:46.381168 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 09:50:46 crc kubenswrapper[4730]: E0930 09:50:46.381406 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 09:50:46 crc kubenswrapper[4730]: I0930 09:50:46.409273 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"823c4c28-801d-421e-b15f-02a17e300753\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://566f950ac85f82874c332e01386898784e3b3e37ee3c422f8b73f6e732cd4541\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6f988662b16857e74e7c88eb9e69279e7b5807aa2831cdd6ed9a7cab0a6b2494\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\
":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d99f5927ded358669fa88d47fb6a8d8bad2808bfdd07920056227828e478eda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fbe6a18bf72a902960dfc551de099c3956696ca5ac78ff5afc30a93768be323\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd7abb6daba8cf38e3d8cc62ab526acb2fe714e07477eb5d1ecf77e145cf60dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d525f8baccabf7bc3b8067dbbc59d76851878ccc48a9fbe61c637b6fe57e01d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d209948
2919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://380a34335df704ec5f2fb810525f314739ef1efe35a54dd9e23f95b66389a669\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://380a34335df704ec5f2fb810525f314739ef1efe35a54dd9e23f95b66389a669\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T09:50:35Z\\\",\\\"message\\\":\\\"er-wide LB for network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-dns-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-dns-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.174\\\\\\\", Port:9393, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0930 09:50:35.219475 6699 services_controller.go:452] Built service openshift-dns-operator/metrics per-node LB for network=default: []services.LB{}\\\\nF0930 09:50:35.219478 6699 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T09:50:34Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-c5vmh_openshift-ovn-kubernetes(823c4c28-801d-421e-b15f-02a17e300753)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59538b49ec4037e4bd1ccb035a0263f32db2504037aca2ab484dc9323f80b5d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4fcd3f6fd645e486e741ae4ddea7a8669c849d729860c651131da25638a393f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e4fcd3f6fd645e486e741ae4ddea7a8669c849d729860c651131da25638a393f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jfshb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-c5vmh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:46Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:46 crc kubenswrapper[4730]: I0930 09:50:46.423390 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-dqqrb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be86a67e-c663-4551-9ecf-a8c2a9801cd7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9sm8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q9sm8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:51Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-dqqrb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:46Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:46 crc kubenswrapper[4730]: I0930 09:50:46.436049 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:46Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:46 crc kubenswrapper[4730]: I0930 09:50:46.437457 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:46 crc kubenswrapper[4730]: I0930 09:50:46.437513 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:46 crc kubenswrapper[4730]: I0930 09:50:46.437527 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:46 crc kubenswrapper[4730]: I0930 09:50:46.437553 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:46 crc kubenswrapper[4730]: I0930 09:50:46.437566 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:46Z","lastTransitionTime":"2025-09-30T09:50:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:46 crc kubenswrapper[4730]: I0930 09:50:46.449505 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:46Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:46 crc kubenswrapper[4730]: I0930 09:50:46.466069 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-t2frc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"98a6f8df-1ac8-4652-8074-90cb180311ad\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ed00cdb0dca4ffa70594334507af7834b99fd93be39a245a3b569f39154c2a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca62256374b9da6cb7b82db90ee6d121b072440c000bc7dfb04e763224cf666d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T09:50:26Z\\\",\\\"message\\\":\\\"2025-09-30T09:49:40+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_9e508620-01ca-40f6-9692-76b42e358278\\\\n2025-09-30T09:49:40+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_9e508620-01ca-40f6-9692-76b42e358278 to /host/opt/cni/bin/\\\\n2025-09-30T09:49:41Z [verbose] multus-daemon started\\\\n2025-09-30T09:49:41Z [verbose] Readiness Indicator file check\\\\n2025-09-30T09:50:26Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:50:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-txcfr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-t2frc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:46Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:46 crc kubenswrapper[4730]: I0930 09:50:46.487993 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-p4xvk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9871bed2-69f9-44f1-ab80-f8b4b9241e73\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19b882d14f4eaee6c6ad8cdd30c6e1e63eb837c7bc2dd31974c37ffbc3307186\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84aa64a838e41f5db6010538528a9d8d9221b5fe859e1afec9d9d3c71ef90eb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://84aa64a838e41f5db6010538528a9d8d9221b5fe859e1afec9d9d3c71ef90eb7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://618950d5ff8ec1ef34dc0f43e73a265aefbeb97f69a25226fb2007b4d28249a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://618950d5ff8ec1ef34dc0f43e73a265aefbeb97f69a25226fb2007b4d28249a7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://849eb24b3a7e62a7667c303069953f1391cdebf8f2888dc7d3967667c31c2a3e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://849eb24b3a7e62a7667c303069953f1391cdebf8f2888dc7d3967667c31c2a3e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://685162f0c76d93c0aa78dc2c5c0ee794627f665afa60af9d9d0ae3ba8f5f7b59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://685162f0c76d93c0aa78dc2c5c0ee794627f665afa60af9d9d0ae3ba8f5f7b59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c92fbb212b1ad8be488e639f7f88263b3d96ab0320da532d908b3c9df59ef2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c92fbb212b1ad8be488e639f7f88263b3d96ab0320da532d908b3c9df59ef2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://840f2a769ccdfba8933c685d07c325cf84d38bccf1d317c3b664f298aa52d878\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://840f2a769ccdfba8933c685d07c325cf84d38bccf1d317c3b664f298aa52d878\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-472bs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-p4xvk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:46Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:46 crc kubenswrapper[4730]: I0930 09:50:46.499798 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a3f577f-035f-49c0-bc29-3b7849a2214f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04522c3f227c31fd9dc48f62caeaa83a5e3c3d9ef7a60e33e6a20f41ecafdf5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1bb754504953177858ef8742f7c3889393c8478b60edfaed19f1944bf2b6e0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a1bb754504953177858ef8742f7c3889393c8478b60edfaed19f1944bf2b6e0f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:46Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:46 crc kubenswrapper[4730]: I0930 09:50:46.511157 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d4482712-8a6c-4e35-8e85-0777588ab827\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:50:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d84dba8205ee9f4469b0277711b38ff82a1068b3f1cd951ea4a4eefab51ba88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://58b48296b6764c18fc1b5163c1ef503b124b97f11f287fbbbc5b1144ecd06bbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://461de41b45c19fbaf8e606cd30458759972ca804bac7e4cd5495efd8f30280fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0abf086d2b6dabfe2b74ed6b31b94bbd61222698aeb420775118dcca92243bbd\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0abf086d2b6dabfe2b74ed6b31b94bbd61222698aeb420775118dcca92243bbd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:17Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:16Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:46Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:46 crc kubenswrapper[4730]: I0930 09:50:46.522494 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:36Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:46Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:46 crc kubenswrapper[4730]: I0930 09:50:46.535426 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://96ab57c0ead54e1fe3bbf9a7fe3fc8375d1e5ebd29840f01bb2022de2f3bfdc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://373aa4d8084c958728b4a3f16056068fb328a59e55a5f50785647d78fac0e424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:46Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:46 crc kubenswrapper[4730]: I0930 09:50:46.539572 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:46 crc kubenswrapper[4730]: I0930 09:50:46.539636 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:46 crc kubenswrapper[4730]: I0930 09:50:46.539647 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:46 crc kubenswrapper[4730]: I0930 09:50:46.539665 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:46 crc kubenswrapper[4730]: I0930 09:50:46.539679 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:46Z","lastTransitionTime":"2025-09-30T09:50:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:46 crc kubenswrapper[4730]: I0930 09:50:46.552765 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"95bd4436-8399-478d-9552-c9ba5ae8f327\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b7761b460113a85528a11a7475d5245ce3009a0dd413a8deec822c6c0eac07b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9d975\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://defac85c6bf34a3ea262b1ad293516b72b27c3f3e328f3f970694bb978bf90a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9d975\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-d4zf9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:46Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:46 crc kubenswrapper[4730]: I0930 09:50:46.563394 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-nw55k" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c0a99445-b60e-4c47-b4ae-00b9983d8a15\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b566e571569572866ddcfc76684ec3f1fdabc0f890aa5fd8588d136b890e88ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qdpm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:40Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nw55k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:46Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:46 crc kubenswrapper[4730]: I0930 09:50:46.576793 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wh45q" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c534fd9f-3767-4be6-a84e-45260fe2042f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c1daa82f1b7e2909d26e662848abe7adaa29361f528ea2efac340bfd685bb155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n2mj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6b629ad4121cddae85f73b40e36013ebfd8d41e9f990dba444bdd2e51d8f9d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9n2mj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:50Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wh45q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:46Z is after 2025-08-24T17:21:41Z" Sep 30 
09:50:46 crc kubenswrapper[4730]: I0930 09:50:46.591841 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"14260296-7de2-4c79-8afc-9472b7132a27\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac2d1a0a531df208cd612ed7da358209e05fb5a21f658ca2d1ab62720db094e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9d7ccc34d02e190d685a52bfa797834b585dfe919856b9af92419fa4e6e3c8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e42db724f492a056ad52bf50f43c9603664c4307a8f0c01d2d83d36f644c134c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\
\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5305f42ae8fc9005ec4726813a881f7b86ab167dd060238bbde4c9af5eb2c010\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://009056b5d735b1cd232be92cb1bfae18ea1e4e6f5b06185d424bba38ef57ade7\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T09:49:30Z\\\",\\\"message\\\":\\\"W0930 09:49:19.606005 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0930 09:49:19.606415 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759225759 cert, and key in /tmp/serving-cert-1864340106/serving-signer.crt, /tmp/serving-cert-1864340106/serving-signer.key\\\\nI0930 09:49:19.940795 1 observer_polling.go:159] Starting file observer\\\\nW0930 09:49:19.943973 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0930 09:49:19.944252 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 09:49:19.946386 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1864340106/tls.crt::/tmp/serving-cert-1864340106/tls.key\\\\\\\"\\\\nF0930 09:49:30.331930 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2da24b18038fc740ead0fe99d18f91c87014548941763cf0ef15284ed5eb228\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:19Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://674fc8de60d8bca93dc0d54d5e12664780d58b8b4c5eaf314e923c2f0644e11b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://674fc8de60d8bca93dc0d54d5e12664780d58b8b4c5eaf314e923c2f0644e11b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T09:49:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T09:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:16Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:46Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:46 crc kubenswrapper[4730]: I0930 09:50:46.606554 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"17c9fe87-51d8-4e94-b0aa-10f3112788c6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a687cd54db44118582a4e1b57fc64241860ae1cfdc202ffd5dccc517967e21f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://95fa75e3d43d0b4c4f3ddcb59fc3bd9fb3a75647e5fedd92463bdfaf6c3db722\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://758b57b21f203bcb20f4e3a3b20c6de8696ba56b4e0eff6a31faaf6879a3e7b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba42c83f0075e3d0f10f24307dd574c42a505351503a8794bbcee9e986ec80e3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:16Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:46Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:46 crc kubenswrapper[4730]: I0930 09:50:46.620475 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1917c40c0ff4b4b6a2389c465a4887bad0db0b5b86a8715e0c790b3e0acfb973\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:46Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:46 crc kubenswrapper[4730]: I0930 09:50:46.633605 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:40Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e32477c6ee071d9e994825da575711cc75f177c998dd5fd170be07551b40e049\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:46Z is after 2025-08-24T17:21:41Z" Sep 30 09:50:46 crc kubenswrapper[4730]: I0930 09:50:46.642357 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:46 crc kubenswrapper[4730]: I0930 09:50:46.642394 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:46 crc kubenswrapper[4730]: I0930 09:50:46.642406 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:46 crc kubenswrapper[4730]: I0930 09:50:46.642425 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:46 crc kubenswrapper[4730]: I0930 09:50:46.642440 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:46Z","lastTransitionTime":"2025-09-30T09:50:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in 
/etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 09:50:46 crc kubenswrapper[4730]: I0930 09:50:46.646264 4730 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-s64nf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1a7e6b85-ac68-4da9-b7eb-b5a936f639df\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T09:49:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a77da47c8d44b3aae7f4e27fcc4df190549c2d881e0969dbe3b8951103477ba3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T09:49:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6rknv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T09:49:37Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-s64nf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T09:50:46Z is after 2025-08-24T17:21:41Z"
Sep 30 09:50:46 crc kubenswrapper[4730]: I0930 09:50:46.745698 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 09:50:46 crc kubenswrapper[4730]: I0930 09:50:46.745755 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 09:50:46 crc kubenswrapper[4730]: I0930 09:50:46.745768 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 09:50:46 crc kubenswrapper[4730]: I0930 09:50:46.745785 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 09:50:46 crc kubenswrapper[4730]: I0930 09:50:46.745797 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:46Z","lastTransitionTime":"2025-09-30T09:50:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 09:50:46 crc kubenswrapper[4730]: I0930 09:50:46.848484 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 09:50:46 crc kubenswrapper[4730]: I0930 09:50:46.848561 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 09:50:46 crc kubenswrapper[4730]: I0930 09:50:46.848581 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 09:50:46 crc kubenswrapper[4730]: I0930 09:50:46.848644 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 09:50:46 crc kubenswrapper[4730]: I0930 09:50:46.848671 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:46Z","lastTransitionTime":"2025-09-30T09:50:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 09:50:46 crc kubenswrapper[4730]: I0930 09:50:46.952260 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 09:50:46 crc kubenswrapper[4730]: I0930 09:50:46.952300 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 09:50:46 crc kubenswrapper[4730]: I0930 09:50:46.952310 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 09:50:46 crc kubenswrapper[4730]: I0930 09:50:46.952327 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 09:50:46 crc kubenswrapper[4730]: I0930 09:50:46.952338 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:46Z","lastTransitionTime":"2025-09-30T09:50:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 09:50:47 crc kubenswrapper[4730]: I0930 09:50:47.054949 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 09:50:47 crc kubenswrapper[4730]: I0930 09:50:47.054992 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 09:50:47 crc kubenswrapper[4730]: I0930 09:50:47.055003 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 09:50:47 crc kubenswrapper[4730]: I0930 09:50:47.055019 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 09:50:47 crc kubenswrapper[4730]: I0930 09:50:47.055029 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:47Z","lastTransitionTime":"2025-09-30T09:50:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 09:50:47 crc kubenswrapper[4730]: I0930 09:50:47.159084 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 09:50:47 crc kubenswrapper[4730]: I0930 09:50:47.159152 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 09:50:47 crc kubenswrapper[4730]: I0930 09:50:47.159172 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 09:50:47 crc kubenswrapper[4730]: I0930 09:50:47.159207 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 09:50:47 crc kubenswrapper[4730]: I0930 09:50:47.159234 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:47Z","lastTransitionTime":"2025-09-30T09:50:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 09:50:47 crc kubenswrapper[4730]: I0930 09:50:47.263047 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 09:50:47 crc kubenswrapper[4730]: I0930 09:50:47.263099 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 09:50:47 crc kubenswrapper[4730]: I0930 09:50:47.263111 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 09:50:47 crc kubenswrapper[4730]: I0930 09:50:47.263131 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 09:50:47 crc kubenswrapper[4730]: I0930 09:50:47.263183 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:47Z","lastTransitionTime":"2025-09-30T09:50:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 09:50:47 crc kubenswrapper[4730]: I0930 09:50:47.367140 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 09:50:47 crc kubenswrapper[4730]: I0930 09:50:47.367230 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 09:50:47 crc kubenswrapper[4730]: I0930 09:50:47.367259 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 09:50:47 crc kubenswrapper[4730]: I0930 09:50:47.367298 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 09:50:47 crc kubenswrapper[4730]: I0930 09:50:47.367325 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:47Z","lastTransitionTime":"2025-09-30T09:50:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 09:50:47 crc kubenswrapper[4730]: I0930 09:50:47.380381 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-dqqrb"
Sep 30 09:50:47 crc kubenswrapper[4730]: E0930 09:50:47.380588 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-dqqrb" podUID="be86a67e-c663-4551-9ecf-a8c2a9801cd7"
Sep 30 09:50:47 crc kubenswrapper[4730]: I0930 09:50:47.469854 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 09:50:47 crc kubenswrapper[4730]: I0930 09:50:47.469913 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 09:50:47 crc kubenswrapper[4730]: I0930 09:50:47.469926 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 09:50:47 crc kubenswrapper[4730]: I0930 09:50:47.469948 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 09:50:47 crc kubenswrapper[4730]: I0930 09:50:47.469961 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:47Z","lastTransitionTime":"2025-09-30T09:50:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 09:50:47 crc kubenswrapper[4730]: I0930 09:50:47.573532 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 09:50:47 crc kubenswrapper[4730]: I0930 09:50:47.573938 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 09:50:47 crc kubenswrapper[4730]: I0930 09:50:47.574019 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 09:50:47 crc kubenswrapper[4730]: I0930 09:50:47.574110 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 09:50:47 crc kubenswrapper[4730]: I0930 09:50:47.574185 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:47Z","lastTransitionTime":"2025-09-30T09:50:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 09:50:47 crc kubenswrapper[4730]: I0930 09:50:47.677867 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 09:50:47 crc kubenswrapper[4730]: I0930 09:50:47.678746 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 09:50:47 crc kubenswrapper[4730]: I0930 09:50:47.678843 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 09:50:47 crc kubenswrapper[4730]: I0930 09:50:47.678939 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 09:50:47 crc kubenswrapper[4730]: I0930 09:50:47.679026 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:47Z","lastTransitionTime":"2025-09-30T09:50:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 09:50:47 crc kubenswrapper[4730]: I0930 09:50:47.782069 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 09:50:47 crc kubenswrapper[4730]: I0930 09:50:47.782106 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 09:50:47 crc kubenswrapper[4730]: I0930 09:50:47.782118 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 09:50:47 crc kubenswrapper[4730]: I0930 09:50:47.782135 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 09:50:47 crc kubenswrapper[4730]: I0930 09:50:47.782151 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:47Z","lastTransitionTime":"2025-09-30T09:50:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 09:50:47 crc kubenswrapper[4730]: I0930 09:50:47.884903 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 09:50:47 crc kubenswrapper[4730]: I0930 09:50:47.884945 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 09:50:47 crc kubenswrapper[4730]: I0930 09:50:47.884957 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 09:50:47 crc kubenswrapper[4730]: I0930 09:50:47.884978 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 09:50:47 crc kubenswrapper[4730]: I0930 09:50:47.884995 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:47Z","lastTransitionTime":"2025-09-30T09:50:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 09:50:47 crc kubenswrapper[4730]: I0930 09:50:47.988432 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 09:50:47 crc kubenswrapper[4730]: I0930 09:50:47.988520 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 09:50:47 crc kubenswrapper[4730]: I0930 09:50:47.988545 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 09:50:47 crc kubenswrapper[4730]: I0930 09:50:47.988577 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 09:50:47 crc kubenswrapper[4730]: I0930 09:50:47.988632 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:47Z","lastTransitionTime":"2025-09-30T09:50:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 09:50:48 crc kubenswrapper[4730]: I0930 09:50:48.091928 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 09:50:48 crc kubenswrapper[4730]: I0930 09:50:48.091985 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 09:50:48 crc kubenswrapper[4730]: I0930 09:50:48.091998 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 09:50:48 crc kubenswrapper[4730]: I0930 09:50:48.092017 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 09:50:48 crc kubenswrapper[4730]: I0930 09:50:48.092031 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:48Z","lastTransitionTime":"2025-09-30T09:50:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 09:50:48 crc kubenswrapper[4730]: I0930 09:50:48.194894 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 09:50:48 crc kubenswrapper[4730]: I0930 09:50:48.195106 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 09:50:48 crc kubenswrapper[4730]: I0930 09:50:48.195131 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 09:50:48 crc kubenswrapper[4730]: I0930 09:50:48.195164 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 09:50:48 crc kubenswrapper[4730]: I0930 09:50:48.195193 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:48Z","lastTransitionTime":"2025-09-30T09:50:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 09:50:48 crc kubenswrapper[4730]: I0930 09:50:48.298218 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 09:50:48 crc kubenswrapper[4730]: I0930 09:50:48.298444 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 09:50:48 crc kubenswrapper[4730]: I0930 09:50:48.298562 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 09:50:48 crc kubenswrapper[4730]: I0930 09:50:48.298675 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 09:50:48 crc kubenswrapper[4730]: I0930 09:50:48.298696 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:48Z","lastTransitionTime":"2025-09-30T09:50:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 09:50:48 crc kubenswrapper[4730]: I0930 09:50:48.379884 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 30 09:50:48 crc kubenswrapper[4730]: I0930 09:50:48.379997 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 30 09:50:48 crc kubenswrapper[4730]: E0930 09:50:48.380070 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 30 09:50:48 crc kubenswrapper[4730]: I0930 09:50:48.380127 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 30 09:50:48 crc kubenswrapper[4730]: E0930 09:50:48.380197 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 30 09:50:48 crc kubenswrapper[4730]: E0930 09:50:48.380318 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 30 09:50:48 crc kubenswrapper[4730]: I0930 09:50:48.401457 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 09:50:48 crc kubenswrapper[4730]: I0930 09:50:48.401511 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 09:50:48 crc kubenswrapper[4730]: I0930 09:50:48.401522 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 09:50:48 crc kubenswrapper[4730]: I0930 09:50:48.401539 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 09:50:48 crc kubenswrapper[4730]: I0930 09:50:48.401554 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:48Z","lastTransitionTime":"2025-09-30T09:50:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 09:50:48 crc kubenswrapper[4730]: I0930 09:50:48.505566 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 09:50:48 crc kubenswrapper[4730]: I0930 09:50:48.505663 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 09:50:48 crc kubenswrapper[4730]: I0930 09:50:48.505682 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 09:50:48 crc kubenswrapper[4730]: I0930 09:50:48.505751 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 09:50:48 crc kubenswrapper[4730]: I0930 09:50:48.505769 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:48Z","lastTransitionTime":"2025-09-30T09:50:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 09:50:48 crc kubenswrapper[4730]: I0930 09:50:48.608001 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 09:50:48 crc kubenswrapper[4730]: I0930 09:50:48.608071 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 09:50:48 crc kubenswrapper[4730]: I0930 09:50:48.608094 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 09:50:48 crc kubenswrapper[4730]: I0930 09:50:48.608118 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 09:50:48 crc kubenswrapper[4730]: I0930 09:50:48.608131 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:48Z","lastTransitionTime":"2025-09-30T09:50:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 09:50:48 crc kubenswrapper[4730]: I0930 09:50:48.711821 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 09:50:48 crc kubenswrapper[4730]: I0930 09:50:48.711929 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 09:50:48 crc kubenswrapper[4730]: I0930 09:50:48.711962 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 09:50:48 crc kubenswrapper[4730]: I0930 09:50:48.711991 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 09:50:48 crc kubenswrapper[4730]: I0930 09:50:48.712009 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:48Z","lastTransitionTime":"2025-09-30T09:50:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 09:50:48 crc kubenswrapper[4730]: I0930 09:50:48.815815 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 09:50:48 crc kubenswrapper[4730]: I0930 09:50:48.815882 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 09:50:48 crc kubenswrapper[4730]: I0930 09:50:48.815895 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 09:50:48 crc kubenswrapper[4730]: I0930 09:50:48.815917 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 09:50:48 crc kubenswrapper[4730]: I0930 09:50:48.815931 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:48Z","lastTransitionTime":"2025-09-30T09:50:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 09:50:48 crc kubenswrapper[4730]: I0930 09:50:48.918318 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 09:50:48 crc kubenswrapper[4730]: I0930 09:50:48.918377 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 09:50:48 crc kubenswrapper[4730]: I0930 09:50:48.918389 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 09:50:48 crc kubenswrapper[4730]: I0930 09:50:48.918408 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 09:50:48 crc kubenswrapper[4730]: I0930 09:50:48.918419 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:48Z","lastTransitionTime":"2025-09-30T09:50:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 09:50:49 crc kubenswrapper[4730]: I0930 09:50:49.020949 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 09:50:49 crc kubenswrapper[4730]: I0930 09:50:49.020985 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 09:50:49 crc kubenswrapper[4730]: I0930 09:50:49.020995 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 09:50:49 crc kubenswrapper[4730]: I0930 09:50:49.021068 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 09:50:49 crc kubenswrapper[4730]: I0930 09:50:49.021088 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:49Z","lastTransitionTime":"2025-09-30T09:50:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 09:50:49 crc kubenswrapper[4730]: I0930 09:50:49.124399 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 09:50:49 crc kubenswrapper[4730]: I0930 09:50:49.124721 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 09:50:49 crc kubenswrapper[4730]: I0930 09:50:49.124910 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 09:50:49 crc kubenswrapper[4730]: I0930 09:50:49.124996 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 09:50:49 crc kubenswrapper[4730]: I0930 09:50:49.125072 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:49Z","lastTransitionTime":"2025-09-30T09:50:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 09:50:49 crc kubenswrapper[4730]: I0930 09:50:49.228101 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 09:50:49 crc kubenswrapper[4730]: I0930 09:50:49.228174 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 09:50:49 crc kubenswrapper[4730]: I0930 09:50:49.228199 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 09:50:49 crc kubenswrapper[4730]: I0930 09:50:49.228226 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 09:50:49 crc kubenswrapper[4730]: I0930 09:50:49.228244 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:49Z","lastTransitionTime":"2025-09-30T09:50:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 09:50:49 crc kubenswrapper[4730]: I0930 09:50:49.331839 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 09:50:49 crc kubenswrapper[4730]: I0930 09:50:49.331900 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 09:50:49 crc kubenswrapper[4730]: I0930 09:50:49.331915 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 09:50:49 crc kubenswrapper[4730]: I0930 09:50:49.331941 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 09:50:49 crc kubenswrapper[4730]: I0930 09:50:49.331960 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:49Z","lastTransitionTime":"2025-09-30T09:50:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 09:50:49 crc kubenswrapper[4730]: I0930 09:50:49.380491 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-dqqrb"
Sep 30 09:50:49 crc kubenswrapper[4730]: E0930 09:50:49.380720 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-dqqrb" podUID="be86a67e-c663-4551-9ecf-a8c2a9801cd7"
Sep 30 09:50:49 crc kubenswrapper[4730]: I0930 09:50:49.435235 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 09:50:49 crc kubenswrapper[4730]: I0930 09:50:49.435301 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 09:50:49 crc kubenswrapper[4730]: I0930 09:50:49.435315 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 09:50:49 crc kubenswrapper[4730]: I0930 09:50:49.435339 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 09:50:49 crc kubenswrapper[4730]: I0930 09:50:49.435354 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:49Z","lastTransitionTime":"2025-09-30T09:50:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 09:50:49 crc kubenswrapper[4730]: I0930 09:50:49.538233 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 09:50:49 crc kubenswrapper[4730]: I0930 09:50:49.538324 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 09:50:49 crc kubenswrapper[4730]: I0930 09:50:49.538337 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 09:50:49 crc kubenswrapper[4730]: I0930 09:50:49.538358 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 09:50:49 crc kubenswrapper[4730]: I0930 09:50:49.538371 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:49Z","lastTransitionTime":"2025-09-30T09:50:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 09:50:49 crc kubenswrapper[4730]: I0930 09:50:49.642256 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 09:50:49 crc kubenswrapper[4730]: I0930 09:50:49.642312 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 09:50:49 crc kubenswrapper[4730]: I0930 09:50:49.642324 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 09:50:49 crc kubenswrapper[4730]: I0930 09:50:49.642346 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 09:50:49 crc kubenswrapper[4730]: I0930 09:50:49.642358 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:49Z","lastTransitionTime":"2025-09-30T09:50:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 09:50:49 crc kubenswrapper[4730]: I0930 09:50:49.745265 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 09:50:49 crc kubenswrapper[4730]: I0930 09:50:49.745309 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 09:50:49 crc kubenswrapper[4730]: I0930 09:50:49.745320 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 09:50:49 crc kubenswrapper[4730]: I0930 09:50:49.745338 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 09:50:49 crc kubenswrapper[4730]: I0930 09:50:49.745349 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:49Z","lastTransitionTime":"2025-09-30T09:50:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 09:50:49 crc kubenswrapper[4730]: I0930 09:50:49.848204 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 09:50:49 crc kubenswrapper[4730]: I0930 09:50:49.848274 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 09:50:49 crc kubenswrapper[4730]: I0930 09:50:49.848315 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 09:50:49 crc kubenswrapper[4730]: I0930 09:50:49.848351 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 09:50:49 crc kubenswrapper[4730]: I0930 09:50:49.848373 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:49Z","lastTransitionTime":"2025-09-30T09:50:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 09:50:49 crc kubenswrapper[4730]: I0930 09:50:49.950892 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 09:50:49 crc kubenswrapper[4730]: I0930 09:50:49.950967 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 09:50:49 crc kubenswrapper[4730]: I0930 09:50:49.950990 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 09:50:49 crc kubenswrapper[4730]: I0930 09:50:49.951024 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 09:50:49 crc kubenswrapper[4730]: I0930 09:50:49.951051 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:49Z","lastTransitionTime":"2025-09-30T09:50:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 09:50:50 crc kubenswrapper[4730]: I0930 09:50:50.053255 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 09:50:50 crc kubenswrapper[4730]: I0930 09:50:50.053307 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 09:50:50 crc kubenswrapper[4730]: I0930 09:50:50.053321 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 09:50:50 crc kubenswrapper[4730]: I0930 09:50:50.053346 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 09:50:50 crc kubenswrapper[4730]: I0930 09:50:50.053359 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:50Z","lastTransitionTime":"2025-09-30T09:50:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 09:50:50 crc kubenswrapper[4730]: I0930 09:50:50.156238 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 09:50:50 crc kubenswrapper[4730]: I0930 09:50:50.156302 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 09:50:50 crc kubenswrapper[4730]: I0930 09:50:50.156321 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 09:50:50 crc kubenswrapper[4730]: I0930 09:50:50.156345 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 09:50:50 crc kubenswrapper[4730]: I0930 09:50:50.156358 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:50Z","lastTransitionTime":"2025-09-30T09:50:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 09:50:50 crc kubenswrapper[4730]: I0930 09:50:50.259336 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 09:50:50 crc kubenswrapper[4730]: I0930 09:50:50.259400 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 09:50:50 crc kubenswrapper[4730]: I0930 09:50:50.259413 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 09:50:50 crc kubenswrapper[4730]: I0930 09:50:50.259449 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 09:50:50 crc kubenswrapper[4730]: I0930 09:50:50.259461 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:50Z","lastTransitionTime":"2025-09-30T09:50:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 09:50:50 crc kubenswrapper[4730]: I0930 09:50:50.362091 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 09:50:50 crc kubenswrapper[4730]: I0930 09:50:50.362130 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 09:50:50 crc kubenswrapper[4730]: I0930 09:50:50.362139 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 09:50:50 crc kubenswrapper[4730]: I0930 09:50:50.362176 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 09:50:50 crc kubenswrapper[4730]: I0930 09:50:50.362187 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:50Z","lastTransitionTime":"2025-09-30T09:50:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 09:50:50 crc kubenswrapper[4730]: I0930 09:50:50.380118 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 30 09:50:50 crc kubenswrapper[4730]: E0930 09:50:50.380242 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 30 09:50:50 crc kubenswrapper[4730]: I0930 09:50:50.380122 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 30 09:50:50 crc kubenswrapper[4730]: I0930 09:50:50.380315 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 30 09:50:50 crc kubenswrapper[4730]: E0930 09:50:50.380367 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 30 09:50:50 crc kubenswrapper[4730]: E0930 09:50:50.380471 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 30 09:50:50 crc kubenswrapper[4730]: I0930 09:50:50.465166 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 09:50:50 crc kubenswrapper[4730]: I0930 09:50:50.465232 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 09:50:50 crc kubenswrapper[4730]: I0930 09:50:50.465243 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 09:50:50 crc kubenswrapper[4730]: I0930 09:50:50.465262 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 09:50:50 crc kubenswrapper[4730]: I0930 09:50:50.465273 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:50Z","lastTransitionTime":"2025-09-30T09:50:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 09:50:50 crc kubenswrapper[4730]: I0930 09:50:50.568693 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 09:50:50 crc kubenswrapper[4730]: I0930 09:50:50.568743 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 09:50:50 crc kubenswrapper[4730]: I0930 09:50:50.568754 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 09:50:50 crc kubenswrapper[4730]: I0930 09:50:50.568770 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 09:50:50 crc kubenswrapper[4730]: I0930 09:50:50.568781 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:50Z","lastTransitionTime":"2025-09-30T09:50:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 09:50:50 crc kubenswrapper[4730]: I0930 09:50:50.671328 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 09:50:50 crc kubenswrapper[4730]: I0930 09:50:50.671758 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 09:50:50 crc kubenswrapper[4730]: I0930 09:50:50.671877 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 09:50:50 crc kubenswrapper[4730]: I0930 09:50:50.671967 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 09:50:50 crc kubenswrapper[4730]: I0930 09:50:50.672032 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:50Z","lastTransitionTime":"2025-09-30T09:50:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 09:50:50 crc kubenswrapper[4730]: I0930 09:50:50.775363 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 09:50:50 crc kubenswrapper[4730]: I0930 09:50:50.776183 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 09:50:50 crc kubenswrapper[4730]: I0930 09:50:50.776266 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 09:50:50 crc kubenswrapper[4730]: I0930 09:50:50.776335 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 09:50:50 crc kubenswrapper[4730]: I0930 09:50:50.776448 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:50Z","lastTransitionTime":"2025-09-30T09:50:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 09:50:50 crc kubenswrapper[4730]: I0930 09:50:50.880103 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 09:50:50 crc kubenswrapper[4730]: I0930 09:50:50.880503 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 09:50:50 crc kubenswrapper[4730]: I0930 09:50:50.880578 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 09:50:50 crc kubenswrapper[4730]: I0930 09:50:50.880681 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 09:50:50 crc kubenswrapper[4730]: I0930 09:50:50.880764 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:50Z","lastTransitionTime":"2025-09-30T09:50:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 09:50:50 crc kubenswrapper[4730]: I0930 09:50:50.984812 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 09:50:50 crc kubenswrapper[4730]: I0930 09:50:50.985206 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 09:50:50 crc kubenswrapper[4730]: I0930 09:50:50.985359 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 09:50:50 crc kubenswrapper[4730]: I0930 09:50:50.985507 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 09:50:50 crc kubenswrapper[4730]: I0930 09:50:50.985678 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:50Z","lastTransitionTime":"2025-09-30T09:50:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 09:50:51 crc kubenswrapper[4730]: I0930 09:50:51.088671 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 09:50:51 crc kubenswrapper[4730]: I0930 09:50:51.088709 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 09:50:51 crc kubenswrapper[4730]: I0930 09:50:51.088719 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 09:50:51 crc kubenswrapper[4730]: I0930 09:50:51.088735 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 09:50:51 crc kubenswrapper[4730]: I0930 09:50:51.088746 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:51Z","lastTransitionTime":"2025-09-30T09:50:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 09:50:51 crc kubenswrapper[4730]: I0930 09:50:51.192030 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 09:50:51 crc kubenswrapper[4730]: I0930 09:50:51.192570 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 09:50:51 crc kubenswrapper[4730]: I0930 09:50:51.192804 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 09:50:51 crc kubenswrapper[4730]: I0930 09:50:51.193044 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 09:50:51 crc kubenswrapper[4730]: I0930 09:50:51.193263 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:51Z","lastTransitionTime":"2025-09-30T09:50:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 09:50:51 crc kubenswrapper[4730]: I0930 09:50:51.296367 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 09:50:51 crc kubenswrapper[4730]: I0930 09:50:51.296416 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 09:50:51 crc kubenswrapper[4730]: I0930 09:50:51.296424 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 09:50:51 crc kubenswrapper[4730]: I0930 09:50:51.296443 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 09:50:51 crc kubenswrapper[4730]: I0930 09:50:51.296454 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:51Z","lastTransitionTime":"2025-09-30T09:50:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 09:50:51 crc kubenswrapper[4730]: I0930 09:50:51.380151 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-dqqrb"
Sep 30 09:50:51 crc kubenswrapper[4730]: E0930 09:50:51.380492 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-dqqrb" podUID="be86a67e-c663-4551-9ecf-a8c2a9801cd7"
Sep 30 09:50:51 crc kubenswrapper[4730]: I0930 09:50:51.381278 4730 scope.go:117] "RemoveContainer" containerID="380a34335df704ec5f2fb810525f314739ef1efe35a54dd9e23f95b66389a669"
Sep 30 09:50:51 crc kubenswrapper[4730]: E0930 09:50:51.381465 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-c5vmh_openshift-ovn-kubernetes(823c4c28-801d-421e-b15f-02a17e300753)\"" pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" podUID="823c4c28-801d-421e-b15f-02a17e300753"
Sep 30 09:50:51 crc kubenswrapper[4730]: I0930 09:50:51.399813 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 09:50:51 crc kubenswrapper[4730]: I0930 09:50:51.399868 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 09:50:51 crc kubenswrapper[4730]: I0930 09:50:51.399878 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 09:50:51 crc kubenswrapper[4730]: I0930 09:50:51.399897 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 09:50:51 crc kubenswrapper[4730]: I0930 09:50:51.399908 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:51Z","lastTransitionTime":"2025-09-30T09:50:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 09:50:51 crc kubenswrapper[4730]: I0930 09:50:51.503545 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 09:50:51 crc kubenswrapper[4730]: I0930 09:50:51.503593 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 09:50:51 crc kubenswrapper[4730]: I0930 09:50:51.503624 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 09:50:51 crc kubenswrapper[4730]: I0930 09:50:51.503649 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 09:50:51 crc kubenswrapper[4730]: I0930 09:50:51.503665 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:51Z","lastTransitionTime":"2025-09-30T09:50:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 09:50:51 crc kubenswrapper[4730]: I0930 09:50:51.606919 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 09:50:51 crc kubenswrapper[4730]: I0930 09:50:51.606971 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 09:50:51 crc kubenswrapper[4730]: I0930 09:50:51.607016 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 09:50:51 crc kubenswrapper[4730]: I0930 09:50:51.607032 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 09:50:51 crc kubenswrapper[4730]: I0930 09:50:51.607041 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:51Z","lastTransitionTime":"2025-09-30T09:50:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 09:50:51 crc kubenswrapper[4730]: I0930 09:50:51.709898 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 09:50:51 crc kubenswrapper[4730]: I0930 09:50:51.709943 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 09:50:51 crc kubenswrapper[4730]: I0930 09:50:51.709954 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 09:50:51 crc kubenswrapper[4730]: I0930 09:50:51.709984 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 09:50:51 crc kubenswrapper[4730]: I0930 09:50:51.709999 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:51Z","lastTransitionTime":"2025-09-30T09:50:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 09:50:51 crc kubenswrapper[4730]: I0930 09:50:51.812243 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 09:50:51 crc kubenswrapper[4730]: I0930 09:50:51.812293 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 09:50:51 crc kubenswrapper[4730]: I0930 09:50:51.812303 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 09:50:51 crc kubenswrapper[4730]: I0930 09:50:51.812320 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 09:50:51 crc kubenswrapper[4730]: I0930 09:50:51.812331 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:51Z","lastTransitionTime":"2025-09-30T09:50:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 09:50:51 crc kubenswrapper[4730]: I0930 09:50:51.915111 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 09:50:51 crc kubenswrapper[4730]: I0930 09:50:51.915163 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 09:50:51 crc kubenswrapper[4730]: I0930 09:50:51.915175 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 09:50:51 crc kubenswrapper[4730]: I0930 09:50:51.915194 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 09:50:51 crc kubenswrapper[4730]: I0930 09:50:51.915207 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:51Z","lastTransitionTime":"2025-09-30T09:50:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?"} Sep 30 09:50:52 crc kubenswrapper[4730]: I0930 09:50:52.018256 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:52 crc kubenswrapper[4730]: I0930 09:50:52.018363 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:52 crc kubenswrapper[4730]: I0930 09:50:52.018402 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:52 crc kubenswrapper[4730]: I0930 09:50:52.018437 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:52 crc kubenswrapper[4730]: I0930 09:50:52.018460 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:52Z","lastTransitionTime":"2025-09-30T09:50:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:52 crc kubenswrapper[4730]: I0930 09:50:52.121068 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:52 crc kubenswrapper[4730]: I0930 09:50:52.121124 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:52 crc kubenswrapper[4730]: I0930 09:50:52.121137 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:52 crc kubenswrapper[4730]: I0930 09:50:52.121158 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:52 crc kubenswrapper[4730]: I0930 09:50:52.121169 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:52Z","lastTransitionTime":"2025-09-30T09:50:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:52 crc kubenswrapper[4730]: I0930 09:50:52.224308 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:52 crc kubenswrapper[4730]: I0930 09:50:52.224357 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:52 crc kubenswrapper[4730]: I0930 09:50:52.224368 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:52 crc kubenswrapper[4730]: I0930 09:50:52.224384 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:52 crc kubenswrapper[4730]: I0930 09:50:52.224394 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:52Z","lastTransitionTime":"2025-09-30T09:50:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:52 crc kubenswrapper[4730]: I0930 09:50:52.327928 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:52 crc kubenswrapper[4730]: I0930 09:50:52.327989 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:52 crc kubenswrapper[4730]: I0930 09:50:52.328004 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:52 crc kubenswrapper[4730]: I0930 09:50:52.328025 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:52 crc kubenswrapper[4730]: I0930 09:50:52.328040 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:52Z","lastTransitionTime":"2025-09-30T09:50:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:52 crc kubenswrapper[4730]: I0930 09:50:52.380823 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 09:50:52 crc kubenswrapper[4730]: I0930 09:50:52.380928 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 09:50:52 crc kubenswrapper[4730]: I0930 09:50:52.380826 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 09:50:52 crc kubenswrapper[4730]: E0930 09:50:52.381064 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 09:50:52 crc kubenswrapper[4730]: E0930 09:50:52.381211 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 09:50:52 crc kubenswrapper[4730]: E0930 09:50:52.381378 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 09:50:52 crc kubenswrapper[4730]: I0930 09:50:52.430801 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:52 crc kubenswrapper[4730]: I0930 09:50:52.430876 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:52 crc kubenswrapper[4730]: I0930 09:50:52.430902 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:52 crc kubenswrapper[4730]: I0930 09:50:52.430936 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:52 crc kubenswrapper[4730]: I0930 09:50:52.430960 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:52Z","lastTransitionTime":"2025-09-30T09:50:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:52 crc kubenswrapper[4730]: I0930 09:50:52.534112 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:52 crc kubenswrapper[4730]: I0930 09:50:52.534189 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:52 crc kubenswrapper[4730]: I0930 09:50:52.534215 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:52 crc kubenswrapper[4730]: I0930 09:50:52.534248 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:52 crc kubenswrapper[4730]: I0930 09:50:52.534276 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:52Z","lastTransitionTime":"2025-09-30T09:50:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:52 crc kubenswrapper[4730]: I0930 09:50:52.638364 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:52 crc kubenswrapper[4730]: I0930 09:50:52.638440 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:52 crc kubenswrapper[4730]: I0930 09:50:52.638468 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:52 crc kubenswrapper[4730]: I0930 09:50:52.638501 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:52 crc kubenswrapper[4730]: I0930 09:50:52.638527 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:52Z","lastTransitionTime":"2025-09-30T09:50:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:52 crc kubenswrapper[4730]: I0930 09:50:52.742199 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:52 crc kubenswrapper[4730]: I0930 09:50:52.742283 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:52 crc kubenswrapper[4730]: I0930 09:50:52.742320 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:52 crc kubenswrapper[4730]: I0930 09:50:52.742354 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:52 crc kubenswrapper[4730]: I0930 09:50:52.742375 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:52Z","lastTransitionTime":"2025-09-30T09:50:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:52 crc kubenswrapper[4730]: I0930 09:50:52.845502 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:52 crc kubenswrapper[4730]: I0930 09:50:52.845564 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:52 crc kubenswrapper[4730]: I0930 09:50:52.845582 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:52 crc kubenswrapper[4730]: I0930 09:50:52.845674 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:52 crc kubenswrapper[4730]: I0930 09:50:52.845695 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:52Z","lastTransitionTime":"2025-09-30T09:50:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:52 crc kubenswrapper[4730]: I0930 09:50:52.949408 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:52 crc kubenswrapper[4730]: I0930 09:50:52.949460 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:52 crc kubenswrapper[4730]: I0930 09:50:52.949470 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:52 crc kubenswrapper[4730]: I0930 09:50:52.949492 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:52 crc kubenswrapper[4730]: I0930 09:50:52.949505 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:52Z","lastTransitionTime":"2025-09-30T09:50:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:53 crc kubenswrapper[4730]: I0930 09:50:53.052173 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:53 crc kubenswrapper[4730]: I0930 09:50:53.052240 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:53 crc kubenswrapper[4730]: I0930 09:50:53.052254 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:53 crc kubenswrapper[4730]: I0930 09:50:53.052276 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:53 crc kubenswrapper[4730]: I0930 09:50:53.052314 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:53Z","lastTransitionTime":"2025-09-30T09:50:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:53 crc kubenswrapper[4730]: I0930 09:50:53.155327 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:53 crc kubenswrapper[4730]: I0930 09:50:53.155383 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:53 crc kubenswrapper[4730]: I0930 09:50:53.155397 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:53 crc kubenswrapper[4730]: I0930 09:50:53.155418 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:53 crc kubenswrapper[4730]: I0930 09:50:53.155432 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:53Z","lastTransitionTime":"2025-09-30T09:50:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:53 crc kubenswrapper[4730]: I0930 09:50:53.258309 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:53 crc kubenswrapper[4730]: I0930 09:50:53.258387 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:53 crc kubenswrapper[4730]: I0930 09:50:53.258406 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:53 crc kubenswrapper[4730]: I0930 09:50:53.258427 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:53 crc kubenswrapper[4730]: I0930 09:50:53.258441 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:53Z","lastTransitionTime":"2025-09-30T09:50:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:53 crc kubenswrapper[4730]: I0930 09:50:53.361566 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:53 crc kubenswrapper[4730]: I0930 09:50:53.361673 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:53 crc kubenswrapper[4730]: I0930 09:50:53.361694 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:53 crc kubenswrapper[4730]: I0930 09:50:53.361726 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:53 crc kubenswrapper[4730]: I0930 09:50:53.361746 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:53Z","lastTransitionTime":"2025-09-30T09:50:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:53 crc kubenswrapper[4730]: I0930 09:50:53.380420 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-dqqrb" Sep 30 09:50:53 crc kubenswrapper[4730]: E0930 09:50:53.380718 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-dqqrb" podUID="be86a67e-c663-4551-9ecf-a8c2a9801cd7" Sep 30 09:50:53 crc kubenswrapper[4730]: I0930 09:50:53.465220 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:53 crc kubenswrapper[4730]: I0930 09:50:53.465268 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:53 crc kubenswrapper[4730]: I0930 09:50:53.465281 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:53 crc kubenswrapper[4730]: I0930 09:50:53.465298 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:53 crc kubenswrapper[4730]: I0930 09:50:53.465309 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:53Z","lastTransitionTime":"2025-09-30T09:50:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:53 crc kubenswrapper[4730]: I0930 09:50:53.569149 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:53 crc kubenswrapper[4730]: I0930 09:50:53.569212 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:53 crc kubenswrapper[4730]: I0930 09:50:53.569233 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:53 crc kubenswrapper[4730]: I0930 09:50:53.569254 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:53 crc kubenswrapper[4730]: I0930 09:50:53.569266 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:53Z","lastTransitionTime":"2025-09-30T09:50:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:53 crc kubenswrapper[4730]: I0930 09:50:53.673434 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:53 crc kubenswrapper[4730]: I0930 09:50:53.673577 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:53 crc kubenswrapper[4730]: I0930 09:50:53.673602 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:53 crc kubenswrapper[4730]: I0930 09:50:53.673702 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:53 crc kubenswrapper[4730]: I0930 09:50:53.673765 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:53Z","lastTransitionTime":"2025-09-30T09:50:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:53 crc kubenswrapper[4730]: I0930 09:50:53.778123 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:53 crc kubenswrapper[4730]: I0930 09:50:53.778172 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:53 crc kubenswrapper[4730]: I0930 09:50:53.778182 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:53 crc kubenswrapper[4730]: I0930 09:50:53.778200 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:53 crc kubenswrapper[4730]: I0930 09:50:53.778212 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:53Z","lastTransitionTime":"2025-09-30T09:50:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:53 crc kubenswrapper[4730]: I0930 09:50:53.880721 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:53 crc kubenswrapper[4730]: I0930 09:50:53.880758 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:53 crc kubenswrapper[4730]: I0930 09:50:53.880769 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:53 crc kubenswrapper[4730]: I0930 09:50:53.880784 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:53 crc kubenswrapper[4730]: I0930 09:50:53.880794 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:53Z","lastTransitionTime":"2025-09-30T09:50:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:53 crc kubenswrapper[4730]: I0930 09:50:53.982975 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:53 crc kubenswrapper[4730]: I0930 09:50:53.983031 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:53 crc kubenswrapper[4730]: I0930 09:50:53.983047 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:53 crc kubenswrapper[4730]: I0930 09:50:53.983074 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:53 crc kubenswrapper[4730]: I0930 09:50:53.983092 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:53Z","lastTransitionTime":"2025-09-30T09:50:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:54 crc kubenswrapper[4730]: I0930 09:50:54.086324 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:54 crc kubenswrapper[4730]: I0930 09:50:54.086383 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:54 crc kubenswrapper[4730]: I0930 09:50:54.086398 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:54 crc kubenswrapper[4730]: I0930 09:50:54.086419 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:54 crc kubenswrapper[4730]: I0930 09:50:54.086433 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:54Z","lastTransitionTime":"2025-09-30T09:50:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:54 crc kubenswrapper[4730]: I0930 09:50:54.189746 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:54 crc kubenswrapper[4730]: I0930 09:50:54.189808 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:54 crc kubenswrapper[4730]: I0930 09:50:54.189821 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:54 crc kubenswrapper[4730]: I0930 09:50:54.189843 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:54 crc kubenswrapper[4730]: I0930 09:50:54.189856 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:54Z","lastTransitionTime":"2025-09-30T09:50:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 09:50:54 crc kubenswrapper[4730]: I0930 09:50:54.292057 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:54 crc kubenswrapper[4730]: I0930 09:50:54.292105 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:54 crc kubenswrapper[4730]: I0930 09:50:54.292116 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:54 crc kubenswrapper[4730]: I0930 09:50:54.292133 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:54 crc kubenswrapper[4730]: I0930 09:50:54.292143 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:54Z","lastTransitionTime":"2025-09-30T09:50:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:54 crc kubenswrapper[4730]: I0930 09:50:54.379925 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 09:50:54 crc kubenswrapper[4730]: I0930 09:50:54.379925 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 09:50:54 crc kubenswrapper[4730]: I0930 09:50:54.380134 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 09:50:54 crc kubenswrapper[4730]: E0930 09:50:54.380339 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 09:50:54 crc kubenswrapper[4730]: E0930 09:50:54.380466 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 09:50:54 crc kubenswrapper[4730]: E0930 09:50:54.380591 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 09:50:54 crc kubenswrapper[4730]: I0930 09:50:54.395354 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:54 crc kubenswrapper[4730]: I0930 09:50:54.395398 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:54 crc kubenswrapper[4730]: I0930 09:50:54.395411 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:54 crc kubenswrapper[4730]: I0930 09:50:54.395428 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:54 crc kubenswrapper[4730]: I0930 09:50:54.395439 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:54Z","lastTransitionTime":"2025-09-30T09:50:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:54 crc kubenswrapper[4730]: I0930 09:50:54.480583 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 09:50:54 crc kubenswrapper[4730]: I0930 09:50:54.480640 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 09:50:54 crc kubenswrapper[4730]: I0930 09:50:54.480651 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 09:50:54 crc kubenswrapper[4730]: I0930 09:50:54.480667 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 09:50:54 crc kubenswrapper[4730]: I0930 09:50:54.480677 4730 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T09:50:54Z","lastTransitionTime":"2025-09-30T09:50:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 09:50:54 crc kubenswrapper[4730]: I0930 09:50:54.534870 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-xplcd"] Sep 30 09:50:54 crc kubenswrapper[4730]: I0930 09:50:54.535516 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-xplcd" Sep 30 09:50:54 crc kubenswrapper[4730]: I0930 09:50:54.538160 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Sep 30 09:50:54 crc kubenswrapper[4730]: I0930 09:50:54.538299 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Sep 30 09:50:54 crc kubenswrapper[4730]: I0930 09:50:54.538333 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Sep 30 09:50:54 crc kubenswrapper[4730]: I0930 09:50:54.538819 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Sep 30 09:50:54 crc kubenswrapper[4730]: I0930 09:50:54.594948 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-p4xvk" podStartSLOduration=77.594903807 podStartE2EDuration="1m17.594903807s" podCreationTimestamp="2025-09-30 09:49:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 09:50:54.594208958 +0000 UTC m=+98.927468961" watchObservedRunningTime="2025-09-30 09:50:54.594903807 +0000 UTC m=+98.928163800" Sep 30 09:50:54 crc kubenswrapper[4730]: I0930 09:50:54.606104 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=22.606077807 podStartE2EDuration="22.606077807s" podCreationTimestamp="2025-09-30 09:50:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 09:50:54.604994237 +0000 UTC m=+98.938254240" watchObservedRunningTime="2025-09-30 09:50:54.606077807 +0000 UTC m=+98.939337800" Sep 30 09:50:54 crc kubenswrapper[4730]: I0930 09:50:54.617134 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=47.617116743 podStartE2EDuration="47.617116743s" podCreationTimestamp="2025-09-30 09:50:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 09:50:54.617083042 +0000 UTC m=+98.950343035" watchObservedRunningTime="2025-09-30 09:50:54.617116743 +0000 UTC m=+98.950376736" Sep 30 09:50:54 crc kubenswrapper[4730]: I0930 09:50:54.654906 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podStartSLOduration=77.6548813 podStartE2EDuration="1m17.6548813s" podCreationTimestamp="2025-09-30 09:49:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 09:50:54.654456219 +0000 UTC m=+98.987716212" watchObservedRunningTime="2025-09-30 09:50:54.6548813 +0000 UTC m=+98.988141293" Sep 30 09:50:54 crc kubenswrapper[4730]: I0930 09:50:54.664531 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-nw55k" podStartSLOduration=77.664509167 podStartE2EDuration="1m17.664509167s" podCreationTimestamp="2025-09-30 09:49:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" 
lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 09:50:54.663413887 +0000 UTC m=+98.996673890" watchObservedRunningTime="2025-09-30 09:50:54.664509167 +0000 UTC m=+98.997769160" Sep 30 09:50:54 crc kubenswrapper[4730]: I0930 09:50:54.674217 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wh45q" podStartSLOduration=77.674196846 podStartE2EDuration="1m17.674196846s" podCreationTimestamp="2025-09-30 09:49:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 09:50:54.674061243 +0000 UTC m=+99.007321256" watchObservedRunningTime="2025-09-30 09:50:54.674196846 +0000 UTC m=+99.007456849" Sep 30 09:50:54 crc kubenswrapper[4730]: I0930 09:50:54.683439 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/27f898a2-36ea-4193-9424-d4575f3c613c-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-xplcd\" (UID: \"27f898a2-36ea-4193-9424-d4575f3c613c\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-xplcd" Sep 30 09:50:54 crc kubenswrapper[4730]: I0930 09:50:54.683493 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/27f898a2-36ea-4193-9424-d4575f3c613c-service-ca\") pod \"cluster-version-operator-5c965bbfc6-xplcd\" (UID: \"27f898a2-36ea-4193-9424-d4575f3c613c\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-xplcd" Sep 30 09:50:54 crc kubenswrapper[4730]: I0930 09:50:54.683618 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/27f898a2-36ea-4193-9424-d4575f3c613c-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-xplcd\" (UID: \"27f898a2-36ea-4193-9424-d4575f3c613c\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-xplcd" Sep 30 09:50:54 crc kubenswrapper[4730]: I0930 09:50:54.683669 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/27f898a2-36ea-4193-9424-d4575f3c613c-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-xplcd\" (UID: \"27f898a2-36ea-4193-9424-d4575f3c613c\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-xplcd" Sep 30 09:50:54 crc kubenswrapper[4730]: I0930 09:50:54.683694 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/27f898a2-36ea-4193-9424-d4575f3c613c-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-xplcd\" (UID: \"27f898a2-36ea-4193-9424-d4575f3c613c\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-xplcd" Sep 30 09:50:54 crc kubenswrapper[4730]: I0930 09:50:54.694220 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=78.694195801 podStartE2EDuration="1m18.694195801s" podCreationTimestamp="2025-09-30 09:49:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 09:50:54.693947984 +0000 UTC m=+99.027207987" 
watchObservedRunningTime="2025-09-30 09:50:54.694195801 +0000 UTC m=+99.027455794" Sep 30 09:50:54 crc kubenswrapper[4730]: I0930 09:50:54.709907 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=73.709885347 podStartE2EDuration="1m13.709885347s" podCreationTimestamp="2025-09-30 09:49:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 09:50:54.709546547 +0000 UTC m=+99.042806540" watchObservedRunningTime="2025-09-30 09:50:54.709885347 +0000 UTC m=+99.043145330" Sep 30 09:50:54 crc kubenswrapper[4730]: I0930 09:50:54.738124 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-s64nf" podStartSLOduration=77.738064718 podStartE2EDuration="1m17.738064718s" podCreationTimestamp="2025-09-30 09:49:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 09:50:54.72519972 +0000 UTC m=+99.058459703" watchObservedRunningTime="2025-09-30 09:50:54.738064718 +0000 UTC m=+99.071324711" Sep 30 09:50:54 crc kubenswrapper[4730]: I0930 09:50:54.763995 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-t2frc" podStartSLOduration=77.763970116 podStartE2EDuration="1m17.763970116s" podCreationTimestamp="2025-09-30 09:49:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 09:50:54.763767301 +0000 UTC m=+99.097027294" watchObservedRunningTime="2025-09-30 09:50:54.763970116 +0000 UTC m=+99.097230109" Sep 30 09:50:54 crc kubenswrapper[4730]: I0930 09:50:54.785055 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/27f898a2-36ea-4193-9424-d4575f3c613c-service-ca\") pod \"cluster-version-operator-5c965bbfc6-xplcd\" (UID: \"27f898a2-36ea-4193-9424-d4575f3c613c\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-xplcd" Sep 30 09:50:54 crc kubenswrapper[4730]: I0930 09:50:54.785116 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/27f898a2-36ea-4193-9424-d4575f3c613c-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-xplcd\" (UID: \"27f898a2-36ea-4193-9424-d4575f3c613c\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-xplcd" Sep 30 09:50:54 crc kubenswrapper[4730]: I0930 09:50:54.785137 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/27f898a2-36ea-4193-9424-d4575f3c613c-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-xplcd\" (UID: \"27f898a2-36ea-4193-9424-d4575f3c613c\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-xplcd" Sep 30 09:50:54 crc kubenswrapper[4730]: I0930 09:50:54.785163 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/27f898a2-36ea-4193-9424-d4575f3c613c-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-xplcd\" (UID: \"27f898a2-36ea-4193-9424-d4575f3c613c\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-xplcd" Sep 30 09:50:54 crc kubenswrapper[4730]: 
I0930 09:50:54.785194 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/27f898a2-36ea-4193-9424-d4575f3c613c-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-xplcd\" (UID: \"27f898a2-36ea-4193-9424-d4575f3c613c\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-xplcd"
Sep 30 09:50:54 crc kubenswrapper[4730]: I0930 09:50:54.785253 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/27f898a2-36ea-4193-9424-d4575f3c613c-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-xplcd\" (UID: \"27f898a2-36ea-4193-9424-d4575f3c613c\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-xplcd"
Sep 30 09:50:54 crc kubenswrapper[4730]: I0930 09:50:54.785261 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/27f898a2-36ea-4193-9424-d4575f3c613c-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-xplcd\" (UID: \"27f898a2-36ea-4193-9424-d4575f3c613c\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-xplcd"
Sep 30 09:50:54 crc kubenswrapper[4730]: I0930 09:50:54.786702 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/27f898a2-36ea-4193-9424-d4575f3c613c-service-ca\") pod \"cluster-version-operator-5c965bbfc6-xplcd\" (UID: \"27f898a2-36ea-4193-9424-d4575f3c613c\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-xplcd"
Sep 30 09:50:54 crc kubenswrapper[4730]: I0930 09:50:54.791250 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/27f898a2-36ea-4193-9424-d4575f3c613c-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-xplcd\" (UID: \"27f898a2-36ea-4193-9424-d4575f3c613c\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-xplcd"
Sep 30 09:50:54 crc kubenswrapper[4730]: I0930 09:50:54.811409 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/27f898a2-36ea-4193-9424-d4575f3c613c-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-xplcd\" (UID: \"27f898a2-36ea-4193-9424-d4575f3c613c\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-xplcd"
Sep 30 09:50:54 crc kubenswrapper[4730]: I0930 09:50:54.849759 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-xplcd"
Sep 30 09:50:54 crc kubenswrapper[4730]: I0930 09:50:54.939946 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-xplcd" event={"ID":"27f898a2-36ea-4193-9424-d4575f3c613c","Type":"ContainerStarted","Data":"3945b3747d9437c0d55e28fef9c431e0bc27030d3403caa113462d6dbbad54fb"}
Sep 30 09:50:55 crc kubenswrapper[4730]: I0930 09:50:55.380253 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-dqqrb"
Sep 30 09:50:55 crc kubenswrapper[4730]: E0930 09:50:55.380421 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-dqqrb" podUID="be86a67e-c663-4551-9ecf-a8c2a9801cd7"
Sep 30 09:50:55 crc kubenswrapper[4730]: I0930 09:50:55.493122 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/be86a67e-c663-4551-9ecf-a8c2a9801cd7-metrics-certs\") pod \"network-metrics-daemon-dqqrb\" (UID: \"be86a67e-c663-4551-9ecf-a8c2a9801cd7\") " pod="openshift-multus/network-metrics-daemon-dqqrb"
Sep 30 09:50:55 crc kubenswrapper[4730]: E0930 09:50:55.493364 4730 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Sep 30 09:50:55 crc kubenswrapper[4730]: E0930 09:50:55.493451 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/be86a67e-c663-4551-9ecf-a8c2a9801cd7-metrics-certs podName:be86a67e-c663-4551-9ecf-a8c2a9801cd7 nodeName:}" failed. No retries permitted until 2025-09-30 09:51:59.493429018 +0000 UTC m=+163.826689031 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/be86a67e-c663-4551-9ecf-a8c2a9801cd7-metrics-certs") pod "network-metrics-daemon-dqqrb" (UID: "be86a67e-c663-4551-9ecf-a8c2a9801cd7") : object "openshift-multus"/"metrics-daemon-secret" not registered
Sep 30 09:50:55 crc kubenswrapper[4730]: I0930 09:50:55.946647 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-xplcd" event={"ID":"27f898a2-36ea-4193-9424-d4575f3c613c","Type":"ContainerStarted","Data":"329089f24c2863376c8ea442ebc0f65ffe5b2d64a8ce0547ae85b1dfb634fd81"}
Sep 30 09:50:55 crc kubenswrapper[4730]: I0930 09:50:55.960701 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-xplcd" podStartSLOduration=78.960676717 podStartE2EDuration="1m18.960676717s" podCreationTimestamp="2025-09-30 09:49:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 09:50:55.960663307 +0000 UTC m=+100.293923330" watchObservedRunningTime="2025-09-30 09:50:55.960676717 +0000 UTC m=+100.293936710"
Sep 30 09:50:56 crc kubenswrapper[4730]: I0930 09:50:56.380029 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 30 09:50:56 crc kubenswrapper[4730]: I0930 09:50:56.380100 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 30 09:50:56 crc kubenswrapper[4730]: E0930 09:50:56.381217 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 30 09:50:56 crc kubenswrapper[4730]: I0930 09:50:56.381275 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 30 09:50:56 crc kubenswrapper[4730]: E0930 09:50:56.381401 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 30 09:50:56 crc kubenswrapper[4730]: E0930 09:50:56.381640 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 30 09:50:57 crc kubenswrapper[4730]: I0930 09:50:57.379986 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-dqqrb"
Sep 30 09:50:57 crc kubenswrapper[4730]: E0930 09:50:57.380168 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-dqqrb" podUID="be86a67e-c663-4551-9ecf-a8c2a9801cd7"
Sep 30 09:50:58 crc kubenswrapper[4730]: I0930 09:50:58.380491 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 30 09:50:58 crc kubenswrapper[4730]: E0930 09:50:58.380672 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 30 09:50:58 crc kubenswrapper[4730]: I0930 09:50:58.380872 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 30 09:50:58 crc kubenswrapper[4730]: I0930 09:50:58.380905 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 30 09:50:58 crc kubenswrapper[4730]: E0930 09:50:58.380952 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 30 09:50:58 crc kubenswrapper[4730]: E0930 09:50:58.381133 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 30 09:50:59 crc kubenswrapper[4730]: I0930 09:50:59.380311 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-dqqrb"
Sep 30 09:50:59 crc kubenswrapper[4730]: E0930 09:50:59.380471 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-dqqrb" podUID="be86a67e-c663-4551-9ecf-a8c2a9801cd7"
Sep 30 09:51:00 crc kubenswrapper[4730]: I0930 09:51:00.380946 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 30 09:51:00 crc kubenswrapper[4730]: I0930 09:51:00.381116 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 30 09:51:00 crc kubenswrapper[4730]: E0930 09:51:00.381179 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 30 09:51:00 crc kubenswrapper[4730]: I0930 09:51:00.381209 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 30 09:51:00 crc kubenswrapper[4730]: E0930 09:51:00.381477 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 30 09:51:00 crc kubenswrapper[4730]: E0930 09:51:00.381675 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 30 09:51:01 crc kubenswrapper[4730]: I0930 09:51:01.380442 4730 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openshift-multus/network-metrics-daemon-dqqrb"
Sep 30 09:51:01 crc kubenswrapper[4730]: E0930 09:51:01.380700 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-dqqrb" podUID="be86a67e-c663-4551-9ecf-a8c2a9801cd7"
Sep 30 09:51:02 crc kubenswrapper[4730]: I0930 09:51:02.380437 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 30 09:51:02 crc kubenswrapper[4730]: I0930 09:51:02.380476 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 30 09:51:02 crc kubenswrapper[4730]: I0930 09:51:02.380445 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 30 09:51:02 crc kubenswrapper[4730]: E0930 09:51:02.380579 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 30 09:51:02 crc kubenswrapper[4730]: E0930 09:51:02.380682 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 30 09:51:02 crc kubenswrapper[4730]: E0930 09:51:02.380880 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 30 09:51:03 crc kubenswrapper[4730]: I0930 09:51:03.380240 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-dqqrb"
Sep 30 09:51:03 crc kubenswrapper[4730]: E0930 09:51:03.380470 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-dqqrb" podUID="be86a67e-c663-4551-9ecf-a8c2a9801cd7"
Sep 30 09:51:03 crc kubenswrapper[4730]: I0930 09:51:03.397899 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"]
Sep 30 09:51:04 crc kubenswrapper[4730]: I0930 09:51:04.380806 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 30 09:51:04 crc kubenswrapper[4730]: I0930 09:51:04.380851 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 30 09:51:04 crc kubenswrapper[4730]: I0930 09:51:04.380880 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 30 09:51:04 crc kubenswrapper[4730]: E0930 09:51:04.381058 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 30 09:51:04 crc kubenswrapper[4730]: E0930 09:51:04.381167 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 30 09:51:04 crc kubenswrapper[4730]: E0930 09:51:04.381899 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 30 09:51:04 crc kubenswrapper[4730]: I0930 09:51:04.382086 4730 scope.go:117] "RemoveContainer" containerID="380a34335df704ec5f2fb810525f314739ef1efe35a54dd9e23f95b66389a669"
Sep 30 09:51:04 crc kubenswrapper[4730]: E0930 09:51:04.382265 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-c5vmh_openshift-ovn-kubernetes(823c4c28-801d-421e-b15f-02a17e300753)\"" pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" podUID="823c4c28-801d-421e-b15f-02a17e300753"
Sep 30 09:51:05 crc kubenswrapper[4730]: I0930 09:51:05.380587 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-dqqrb"
Sep 30 09:51:05 crc kubenswrapper[4730]: E0930 09:51:05.380872 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-dqqrb" podUID="be86a67e-c663-4551-9ecf-a8c2a9801cd7"
Sep 30 09:51:06 crc kubenswrapper[4730]: I0930 09:51:06.380430 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 30 09:51:06 crc kubenswrapper[4730]: I0930 09:51:06.380559 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 30 09:51:06 crc kubenswrapper[4730]: I0930 09:51:06.380747 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 30 09:51:06 crc kubenswrapper[4730]: E0930 09:51:06.380736 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 30 09:51:06 crc kubenswrapper[4730]: E0930 09:51:06.381039 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 30 09:51:06 crc kubenswrapper[4730]: E0930 09:51:06.381131 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 30 09:51:06 crc kubenswrapper[4730]: I0930 09:51:06.406543 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=3.406526559 podStartE2EDuration="3.406526559s" podCreationTimestamp="2025-09-30 09:51:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 09:51:06.405671096 +0000 UTC m=+110.738931129" watchObservedRunningTime="2025-09-30 09:51:06.406526559 +0000 UTC m=+110.739786552"
Sep 30 09:51:07 crc kubenswrapper[4730]: I0930 09:51:07.380494 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-dqqrb"
Sep 30 09:51:07 crc kubenswrapper[4730]: E0930 09:51:07.380651 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-dqqrb" podUID="be86a67e-c663-4551-9ecf-a8c2a9801cd7"
Sep 30 09:51:08 crc kubenswrapper[4730]: I0930 09:51:08.380697 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 30 09:51:08 crc kubenswrapper[4730]: I0930 09:51:08.380737 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 30 09:51:08 crc kubenswrapper[4730]: I0930 09:51:08.380791 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 30 09:51:08 crc kubenswrapper[4730]: E0930 09:51:08.380844 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 30 09:51:08 crc kubenswrapper[4730]: E0930 09:51:08.380936 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 30 09:51:08 crc kubenswrapper[4730]: E0930 09:51:08.381016 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 30 09:51:09 crc kubenswrapper[4730]: I0930 09:51:09.380307 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-dqqrb"
Sep 30 09:51:09 crc kubenswrapper[4730]: E0930 09:51:09.380504 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-dqqrb" podUID="be86a67e-c663-4551-9ecf-a8c2a9801cd7"
Sep 30 09:51:10 crc kubenswrapper[4730]: I0930 09:51:10.380590 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 30 09:51:10 crc kubenswrapper[4730]: I0930 09:51:10.380683 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 30 09:51:10 crc kubenswrapper[4730]: I0930 09:51:10.380721 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 30 09:51:10 crc kubenswrapper[4730]: E0930 09:51:10.381081 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 09:51:10 crc kubenswrapper[4730]: E0930 09:51:10.381149 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 09:51:10 crc kubenswrapper[4730]: E0930 09:51:10.381256 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 09:51:11 crc kubenswrapper[4730]: I0930 09:51:11.380246 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-dqqrb" Sep 30 09:51:11 crc kubenswrapper[4730]: E0930 09:51:11.380397 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-dqqrb" podUID="be86a67e-c663-4551-9ecf-a8c2a9801cd7" Sep 30 09:51:12 crc kubenswrapper[4730]: I0930 09:51:12.380626 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 09:51:12 crc kubenswrapper[4730]: E0930 09:51:12.380973 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 09:51:12 crc kubenswrapper[4730]: I0930 09:51:12.380751 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 09:51:12 crc kubenswrapper[4730]: E0930 09:51:12.381046 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 09:51:12 crc kubenswrapper[4730]: I0930 09:51:12.380709 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 09:51:12 crc kubenswrapper[4730]: E0930 09:51:12.381097 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 09:51:13 crc kubenswrapper[4730]: I0930 09:51:13.008564 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-t2frc_98a6f8df-1ac8-4652-8074-90cb180311ad/kube-multus/1.log" Sep 30 09:51:13 crc kubenswrapper[4730]: I0930 09:51:13.009205 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-t2frc_98a6f8df-1ac8-4652-8074-90cb180311ad/kube-multus/0.log" Sep 30 09:51:13 crc kubenswrapper[4730]: I0930 09:51:13.009353 4730 generic.go:334] "Generic (PLEG): container finished" podID="98a6f8df-1ac8-4652-8074-90cb180311ad" containerID="9ed00cdb0dca4ffa70594334507af7834b99fd93be39a245a3b569f39154c2a1" exitCode=1 Sep 30 09:51:13 crc kubenswrapper[4730]: I0930 09:51:13.009444 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-t2frc" event={"ID":"98a6f8df-1ac8-4652-8074-90cb180311ad","Type":"ContainerDied","Data":"9ed00cdb0dca4ffa70594334507af7834b99fd93be39a245a3b569f39154c2a1"} Sep 30 09:51:13 crc kubenswrapper[4730]: I0930 09:51:13.009492 4730 scope.go:117] "RemoveContainer" containerID="ca62256374b9da6cb7b82db90ee6d121b072440c000bc7dfb04e763224cf666d" Sep 30 09:51:13 crc kubenswrapper[4730]: I0930 09:51:13.012893 4730 scope.go:117] "RemoveContainer" containerID="9ed00cdb0dca4ffa70594334507af7834b99fd93be39a245a3b569f39154c2a1" Sep 30 09:51:13 crc kubenswrapper[4730]: E0930 09:51:13.013070 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-t2frc_openshift-multus(98a6f8df-1ac8-4652-8074-90cb180311ad)\"" pod="openshift-multus/multus-t2frc" podUID="98a6f8df-1ac8-4652-8074-90cb180311ad" Sep 30 09:51:13 crc kubenswrapper[4730]: I0930 09:51:13.380067 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-dqqrb" Sep 30 09:51:13 crc kubenswrapper[4730]: E0930 09:51:13.380220 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-dqqrb" podUID="be86a67e-c663-4551-9ecf-a8c2a9801cd7" Sep 30 09:51:14 crc kubenswrapper[4730]: I0930 09:51:14.014441 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-t2frc_98a6f8df-1ac8-4652-8074-90cb180311ad/kube-multus/1.log" Sep 30 09:51:14 crc kubenswrapper[4730]: I0930 09:51:14.380773 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 09:51:14 crc kubenswrapper[4730]: I0930 09:51:14.380789 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 09:51:14 crc kubenswrapper[4730]: I0930 09:51:14.380851 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 09:51:14 crc kubenswrapper[4730]: E0930 09:51:14.381437 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 09:51:14 crc kubenswrapper[4730]: E0930 09:51:14.381540 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 09:51:14 crc kubenswrapper[4730]: E0930 09:51:14.382454 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 09:51:15 crc kubenswrapper[4730]: I0930 09:51:15.380066 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-dqqrb" Sep 30 09:51:15 crc kubenswrapper[4730]: E0930 09:51:15.380250 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-dqqrb" podUID="be86a67e-c663-4551-9ecf-a8c2a9801cd7" Sep 30 09:51:16 crc kubenswrapper[4730]: E0930 09:51:16.369688 4730 kubelet_node_status.go:497] "Node not becoming ready in time after startup" Sep 30 09:51:16 crc kubenswrapper[4730]: I0930 09:51:16.380548 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 09:51:16 crc kubenswrapper[4730]: I0930 09:51:16.380562 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 09:51:16 crc kubenswrapper[4730]: E0930 09:51:16.381703 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 09:51:16 crc kubenswrapper[4730]: I0930 09:51:16.381784 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 09:51:16 crc kubenswrapper[4730]: E0930 09:51:16.382466 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 09:51:16 crc kubenswrapper[4730]: E0930 09:51:16.382591 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 09:51:16 crc kubenswrapper[4730]: E0930 09:51:16.480856 4730 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Sep 30 09:51:17 crc kubenswrapper[4730]: I0930 09:51:17.379886 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-dqqrb" Sep 30 09:51:17 crc kubenswrapper[4730]: E0930 09:51:17.380124 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-dqqrb" podUID="be86a67e-c663-4551-9ecf-a8c2a9801cd7" Sep 30 09:51:18 crc kubenswrapper[4730]: I0930 09:51:18.379970 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 09:51:18 crc kubenswrapper[4730]: I0930 09:51:18.380001 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 09:51:18 crc kubenswrapper[4730]: I0930 09:51:18.380099 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 09:51:18 crc kubenswrapper[4730]: E0930 09:51:18.380125 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 09:51:18 crc kubenswrapper[4730]: E0930 09:51:18.380292 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 09:51:18 crc kubenswrapper[4730]: E0930 09:51:18.380370 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 09:51:18 crc kubenswrapper[4730]: I0930 09:51:18.381473 4730 scope.go:117] "RemoveContainer" containerID="380a34335df704ec5f2fb810525f314739ef1efe35a54dd9e23f95b66389a669" Sep 30 09:51:19 crc kubenswrapper[4730]: I0930 09:51:19.033610 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-c5vmh_823c4c28-801d-421e-b15f-02a17e300753/ovnkube-controller/3.log" Sep 30 09:51:19 crc kubenswrapper[4730]: I0930 09:51:19.036073 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" event={"ID":"823c4c28-801d-421e-b15f-02a17e300753","Type":"ContainerStarted","Data":"f2cbd154ce1dde6b387c050a30b9930a0c6d6cd1faab1f0a03af36431025789b"} Sep 30 09:51:19 crc kubenswrapper[4730]: I0930 09:51:19.036474 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" Sep 30 09:51:19 crc kubenswrapper[4730]: I0930 09:51:19.063599 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" podStartSLOduration=102.063580174 podStartE2EDuration="1m42.063580174s" podCreationTimestamp="2025-09-30 09:49:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 09:51:19.062326049 +0000 UTC m=+123.395586042" watchObservedRunningTime="2025-09-30 09:51:19.063580174 +0000 UTC m=+123.396840167" Sep 30 09:51:19 crc kubenswrapper[4730]: I0930 09:51:19.185088 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-dqqrb"] Sep 30 09:51:19 crc kubenswrapper[4730]: I0930 09:51:19.185433 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-dqqrb" Sep 30 09:51:19 crc kubenswrapper[4730]: E0930 09:51:19.185516 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-dqqrb" podUID="be86a67e-c663-4551-9ecf-a8c2a9801cd7" Sep 30 09:51:20 crc kubenswrapper[4730]: I0930 09:51:20.402603 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 09:51:20 crc kubenswrapper[4730]: I0930 09:51:20.402686 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 09:51:20 crc kubenswrapper[4730]: I0930 09:51:20.402649 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-dqqrb" Sep 30 09:51:20 crc kubenswrapper[4730]: I0930 09:51:20.402805 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 09:51:20 crc kubenswrapper[4730]: E0930 09:51:20.402811 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 09:51:20 crc kubenswrapper[4730]: E0930 09:51:20.402895 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 09:51:20 crc kubenswrapper[4730]: E0930 09:51:20.402962 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 09:51:20 crc kubenswrapper[4730]: E0930 09:51:20.403020 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-dqqrb" podUID="be86a67e-c663-4551-9ecf-a8c2a9801cd7" Sep 30 09:51:21 crc kubenswrapper[4730]: E0930 09:51:21.482014 4730 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Sep 30 09:51:22 crc kubenswrapper[4730]: I0930 09:51:22.380476 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 09:51:22 crc kubenswrapper[4730]: E0930 09:51:22.380596 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 09:51:22 crc kubenswrapper[4730]: I0930 09:51:22.380795 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 09:51:22 crc kubenswrapper[4730]: E0930 09:51:22.380842 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 09:51:22 crc kubenswrapper[4730]: I0930 09:51:22.380855 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 09:51:22 crc kubenswrapper[4730]: I0930 09:51:22.380880 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-dqqrb" Sep 30 09:51:22 crc kubenswrapper[4730]: E0930 09:51:22.381061 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 09:51:22 crc kubenswrapper[4730]: E0930 09:51:22.381208 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-dqqrb" podUID="be86a67e-c663-4551-9ecf-a8c2a9801cd7" Sep 30 09:51:24 crc kubenswrapper[4730]: I0930 09:51:24.042741 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" Sep 30 09:51:24 crc kubenswrapper[4730]: I0930 09:51:24.380896 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-dqqrb" Sep 30 09:51:24 crc kubenswrapper[4730]: I0930 09:51:24.380958 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 09:51:24 crc kubenswrapper[4730]: I0930 09:51:24.380919 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 09:51:24 crc kubenswrapper[4730]: I0930 09:51:24.381025 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 09:51:24 crc kubenswrapper[4730]: E0930 09:51:24.381126 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 09:51:24 crc kubenswrapper[4730]: E0930 09:51:24.381202 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 09:51:24 crc kubenswrapper[4730]: E0930 09:51:24.381299 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-dqqrb" podUID="be86a67e-c663-4551-9ecf-a8c2a9801cd7" Sep 30 09:51:24 crc kubenswrapper[4730]: E0930 09:51:24.381479 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 09:51:26 crc kubenswrapper[4730]: I0930 09:51:26.380389 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-dqqrb" Sep 30 09:51:26 crc kubenswrapper[4730]: I0930 09:51:26.380438 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 09:51:26 crc kubenswrapper[4730]: E0930 09:51:26.382473 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 09:51:26 crc kubenswrapper[4730]: I0930 09:51:26.382506 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 09:51:26 crc kubenswrapper[4730]: I0930 09:51:26.382564 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 09:51:26 crc kubenswrapper[4730]: E0930 09:51:26.382683 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-dqqrb" podUID="be86a67e-c663-4551-9ecf-a8c2a9801cd7" Sep 30 09:51:26 crc kubenswrapper[4730]: E0930 09:51:26.382734 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 09:51:26 crc kubenswrapper[4730]: E0930 09:51:26.382872 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 09:51:26 crc kubenswrapper[4730]: E0930 09:51:26.482660 4730 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Sep 30 09:51:27 crc kubenswrapper[4730]: I0930 09:51:27.381057 4730 scope.go:117] "RemoveContainer" containerID="9ed00cdb0dca4ffa70594334507af7834b99fd93be39a245a3b569f39154c2a1" Sep 30 09:51:28 crc kubenswrapper[4730]: I0930 09:51:28.070172 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-t2frc_98a6f8df-1ac8-4652-8074-90cb180311ad/kube-multus/1.log" Sep 30 09:51:28 crc kubenswrapper[4730]: I0930 09:51:28.070259 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-t2frc" event={"ID":"98a6f8df-1ac8-4652-8074-90cb180311ad","Type":"ContainerStarted","Data":"a5bb1f559693666f8926ed88798ba0efed5cfc3fb9c465817367617f57eaf858"} Sep 30 09:51:28 crc kubenswrapper[4730]: I0930 09:51:28.380281 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 09:51:28 crc kubenswrapper[4730]: I0930 09:51:28.380321 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 09:51:28 crc kubenswrapper[4730]: I0930 09:51:28.380379 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 09:51:28 crc kubenswrapper[4730]: I0930 09:51:28.380382 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-dqqrb" Sep 30 09:51:28 crc kubenswrapper[4730]: E0930 09:51:28.380601 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 09:51:28 crc kubenswrapper[4730]: E0930 09:51:28.380759 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 09:51:28 crc kubenswrapper[4730]: E0930 09:51:28.380879 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-dqqrb" podUID="be86a67e-c663-4551-9ecf-a8c2a9801cd7" Sep 30 09:51:28 crc kubenswrapper[4730]: E0930 09:51:28.380987 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 09:51:30 crc kubenswrapper[4730]: I0930 09:51:30.380843 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 09:51:30 crc kubenswrapper[4730]: I0930 09:51:30.380938 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 09:51:30 crc kubenswrapper[4730]: E0930 09:51:30.380962 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 09:51:30 crc kubenswrapper[4730]: I0930 09:51:30.381014 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-dqqrb" Sep 30 09:51:30 crc kubenswrapper[4730]: I0930 09:51:30.380844 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 09:51:30 crc kubenswrapper[4730]: E0930 09:51:30.381063 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 09:51:30 crc kubenswrapper[4730]: E0930 09:51:30.381161 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-dqqrb" podUID="be86a67e-c663-4551-9ecf-a8c2a9801cd7" Sep 30 09:51:30 crc kubenswrapper[4730]: E0930 09:51:30.381232 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 09:51:32 crc kubenswrapper[4730]: I0930 09:51:32.380591 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 09:51:32 crc kubenswrapper[4730]: I0930 09:51:32.380675 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-dqqrb" Sep 30 09:51:32 crc kubenswrapper[4730]: I0930 09:51:32.380723 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 09:51:32 crc kubenswrapper[4730]: I0930 09:51:32.380590 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 30 09:51:32 crc kubenswrapper[4730]: I0930 09:51:32.382945 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt"
Sep 30 09:51:32 crc kubenswrapper[4730]: I0930 09:51:32.383057 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c"
Sep 30 09:51:32 crc kubenswrapper[4730]: I0930 09:51:32.383115 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin"
Sep 30 09:51:32 crc kubenswrapper[4730]: I0930 09:51:32.383119 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt"
Sep 30 09:51:32 crc kubenswrapper[4730]: I0930 09:51:32.383221 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret"
Sep 30 09:51:32 crc kubenswrapper[4730]: I0930 09:51:32.384076 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert"
Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.319999 4730 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady"
Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.360411 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-vxqtl"]
Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.361226 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-vgtf6"]
Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.361550 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-tdl79"]
Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.362187 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-tdl79"
Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.362922 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vxqtl"
Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.363128 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-vgtf6"
Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.364681 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-md87h"]
Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.365223 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-md87h"
Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.366102 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-zzmfv"]
Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.366723 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-brcnh"]
Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.367211 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-df648"]
Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.367704 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-df648"
Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.368195 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-zzmfv"
Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.368592 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-brcnh"
Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.369657 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-cljtg"]
Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.370074 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-75wjk"]
Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.370343 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1"
Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.374886 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-cljtg"
Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.377121 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca"
Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.377726 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt"
Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.377815 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt"
Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.377883 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle"
Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.377997 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt"
Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.378079 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca"
Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.378170 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client"
Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.378207 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config"
Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.378273 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca"
Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.378337 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1"
Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.377891 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca"
Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.378455 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client"
Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.378212 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config"
Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.378488 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt"
Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.379227 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert"
Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.382167 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-mpfz6"]
Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.382874 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-r86xc"]
Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.383210 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj"
Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.383354 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert"
Sep 30 09:51:35 crc kubenswrapper[4730]: I0930
09:51:35.383488 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.383682 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.383900 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-r86xc" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.384430 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-75wjk" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.383682 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-hlvdp"] Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.384983 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-mpfz6" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.383905 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.385548 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.385733 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.386184 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.386229 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.386283 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.386351 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.386405 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.386190 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.384060 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.384186 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.384236 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.386640 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.384279 4730 reflector.go:368] 
Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.384308 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.384412 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.399507 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-gf4lh"] Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.384429 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.384212 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.401086 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.419036 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-r8tph"] Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.419333 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-n9qkg"] Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.419825 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-n9qkg" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.420100 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-gf4lh" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.420414 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-r8tph" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.421740 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.421799 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.421900 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.422049 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.422171 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.422298 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-hlvdp" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.422492 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.424678 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-cjsxd"] Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.443051 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.422578 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.443996 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.444400 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.444493 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.444961 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.422632 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.422721 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.422778 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.423392 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.426540 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.426580 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.426720 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.426795 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.428758 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.428757 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.429755 4730 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.433277 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.434106 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.435047 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.431416 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vwqfj\" (UniqueName: \"kubernetes.io/projected/77e50cc1-ad3e-4d41-ac6a-0bf534f94b8c-kube-api-access-vwqfj\") pod \"machine-approver-56656f9798-mpfz6\" (UID: \"77e50cc1-ad3e-4d41-ac6a-0bf534f94b8c\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-mpfz6" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.446544 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/dff327ac-2d87-47ce-ae92-f6e001b9d876-serving-cert\") pod \"apiserver-7bbb656c7d-vxqtl\" (UID: \"dff327ac-2d87-47ce-ae92-f6e001b9d876\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vxqtl" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.446575 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/dff327ac-2d87-47ce-ae92-f6e001b9d876-audit-dir\") pod \"apiserver-7bbb656c7d-vxqtl\" (UID: \"dff327ac-2d87-47ce-ae92-f6e001b9d876\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vxqtl" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.446630 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/dff327ac-2d87-47ce-ae92-f6e001b9d876-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-vxqtl\" (UID: \"dff327ac-2d87-47ce-ae92-f6e001b9d876\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vxqtl" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.446889 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f17dee56-592e-45e7-8c4f-80854757d254-serving-cert\") pod \"route-controller-manager-6576b87f9c-cljtg\" (UID: \"f17dee56-592e-45e7-8c4f-80854757d254\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-cljtg" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.447033 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rbb94\" (UniqueName: \"kubernetes.io/projected/f17dee56-592e-45e7-8c4f-80854757d254-kube-api-access-rbb94\") pod \"route-controller-manager-6576b87f9c-cljtg\" (UID: \"f17dee56-592e-45e7-8c4f-80854757d254\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-cljtg" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.447065 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/fc0f4b15-4b92-4799-b77f-8735c85b2203-metrics-tls\") pod \"ingress-operator-5b745b69d9-n9qkg\" (UID: 
\"fc0f4b15-4b92-4799-b77f-8735c85b2203\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-n9qkg" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.447105 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/dff327ac-2d87-47ce-ae92-f6e001b9d876-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-vxqtl\" (UID: \"dff327ac-2d87-47ce-ae92-f6e001b9d876\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vxqtl" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.447173 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/dff327ac-2d87-47ce-ae92-f6e001b9d876-etcd-client\") pod \"apiserver-7bbb656c7d-vxqtl\" (UID: \"dff327ac-2d87-47ce-ae92-f6e001b9d876\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vxqtl" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.447265 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/77e50cc1-ad3e-4d41-ac6a-0bf534f94b8c-machine-approver-tls\") pod \"machine-approver-56656f9798-mpfz6\" (UID: \"77e50cc1-ad3e-4d41-ac6a-0bf534f94b8c\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-mpfz6" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.447288 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/77e50cc1-ad3e-4d41-ac6a-0bf534f94b8c-auth-proxy-config\") pod \"machine-approver-56656f9798-mpfz6\" (UID: \"77e50cc1-ad3e-4d41-ac6a-0bf534f94b8c\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-mpfz6" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.447395 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/fc0f4b15-4b92-4799-b77f-8735c85b2203-trusted-ca\") pod \"ingress-operator-5b745b69d9-n9qkg\" (UID: \"fc0f4b15-4b92-4799-b77f-8735c85b2203\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-n9qkg" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.447419 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f17dee56-592e-45e7-8c4f-80854757d254-config\") pod \"route-controller-manager-6576b87f9c-cljtg\" (UID: \"f17dee56-592e-45e7-8c4f-80854757d254\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-cljtg" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.447471 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/77e50cc1-ad3e-4d41-ac6a-0bf534f94b8c-config\") pod \"machine-approver-56656f9798-mpfz6\" (UID: \"77e50cc1-ad3e-4d41-ac6a-0bf534f94b8c\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-mpfz6" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.447494 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gfdtj\" (UniqueName: \"kubernetes.io/projected/fc0f4b15-4b92-4799-b77f-8735c85b2203-kube-api-access-gfdtj\") pod \"ingress-operator-5b745b69d9-n9qkg\" (UID: \"fc0f4b15-4b92-4799-b77f-8735c85b2203\") " 
pod="openshift-ingress-operator/ingress-operator-5b745b69d9-n9qkg" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.447876 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/fc0f4b15-4b92-4799-b77f-8735c85b2203-bound-sa-token\") pod \"ingress-operator-5b745b69d9-n9qkg\" (UID: \"fc0f4b15-4b92-4799-b77f-8735c85b2203\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-n9qkg" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.447972 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f17dee56-592e-45e7-8c4f-80854757d254-client-ca\") pod \"route-controller-manager-6576b87f9c-cljtg\" (UID: \"f17dee56-592e-45e7-8c4f-80854757d254\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-cljtg" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.448036 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wc5g6\" (UniqueName: \"kubernetes.io/projected/dff327ac-2d87-47ce-ae92-f6e001b9d876-kube-api-access-wc5g6\") pod \"apiserver-7bbb656c7d-vxqtl\" (UID: \"dff327ac-2d87-47ce-ae92-f6e001b9d876\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vxqtl" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.448069 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/dff327ac-2d87-47ce-ae92-f6e001b9d876-encryption-config\") pod \"apiserver-7bbb656c7d-vxqtl\" (UID: \"dff327ac-2d87-47ce-ae92-f6e001b9d876\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vxqtl" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.448109 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/dff327ac-2d87-47ce-ae92-f6e001b9d876-audit-policies\") pod \"apiserver-7bbb656c7d-vxqtl\" (UID: \"dff327ac-2d87-47ce-ae92-f6e001b9d876\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vxqtl" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.448964 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-cxj9q"] Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.449221 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-cjsxd" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.450285 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-sfwd4"] Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.451075 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xhwpp"] Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.451527 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-v86bf"] Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.454566 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.454796 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.454977 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.455098 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.455225 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.456406 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-wqstr"] Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.456851 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-xbf9b"] Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.457406 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-xbf9b" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.457659 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xhwpp" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.457808 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-v86bf" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.458008 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-cxj9q" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.458054 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-wqstr" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.458741 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-sfwd4" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.462686 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.465754 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.465886 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.465918 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.465932 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.466008 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.466021 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.466085 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.466587 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.466788 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.474467 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.474497 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.474817 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.475294 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.475407 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.476443 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.476528 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.477038 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.478526 4730 reflector.go:368] 
Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.478910 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.481469 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.482301 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.500704 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.500821 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.501130 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.501436 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.501663 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-7sbbv"] Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.501795 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.501946 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.502363 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-m2sph"] Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.502813 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-m2sph" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.503072 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-7sbbv" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.503247 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-9x2qr"] Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.503597 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-9x2qr" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.504196 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-jkmzt"] Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.507344 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.507570 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-v6vtc"] Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.508075 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-v6vtc" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.508211 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.508369 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-jkmzt" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.510934 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.511073 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-bkbql"] Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.511878 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-bkbql" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.512953 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.515476 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.516550 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-9ckzv"] Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.517249 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9ckzv" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.518235 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-xdzzr"] Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.518309 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.519179 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-xdzzr" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.519865 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-nt7nq"] Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.520795 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-nt7nq" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.525338 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.526814 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.544774 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-b829k"] Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.545569 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-b829k" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.545898 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-27l2q"] Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.546496 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.546773 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-27l2q" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.550756 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-ssdbb"] Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.551820 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-f4hwj"] Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.562006 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-cffv2"] Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.563632 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-cffv2" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.564389 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-f4hwj" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.565304 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-ssdbb" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.570050 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.572680 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/dff327ac-2d87-47ce-ae92-f6e001b9d876-encryption-config\") pod \"apiserver-7bbb656c7d-vxqtl\" (UID: \"dff327ac-2d87-47ce-ae92-f6e001b9d876\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vxqtl" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.572762 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/3c83ea01-beb2-4b6c-b67b-93cea3b56ca7-console-oauth-config\") pod \"console-f9d7485db-md87h\" (UID: \"3c83ea01-beb2-4b6c-b67b-93cea3b56ca7\") " pod="openshift-console/console-f9d7485db-md87h" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.572812 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/503191a0-1fb3-4b1e-84f1-ac3d702f686e-audit-dir\") pod \"apiserver-76f77b778f-tdl79\" (UID: \"503191a0-1fb3-4b1e-84f1-ac3d702f686e\") " pod="openshift-apiserver/apiserver-76f77b778f-tdl79" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.572848 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/584babfe-a3c2-49da-97ac-c3a9fc90f102-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-bkbql\" (UID: \"584babfe-a3c2-49da-97ac-c3a9fc90f102\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-bkbql" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.572879 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ltrhn\" (UniqueName: \"kubernetes.io/projected/4a7a5ec3-bbea-4979-a00d-25961a1fd8f6-kube-api-access-ltrhn\") pod \"authentication-operator-69f744f599-vgtf6\" (UID: \"4a7a5ec3-bbea-4979-a00d-25961a1fd8f6\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-vgtf6" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.572914 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3c83ea01-beb2-4b6c-b67b-93cea3b56ca7-trusted-ca-bundle\") pod \"console-f9d7485db-md87h\" (UID: \"3c83ea01-beb2-4b6c-b67b-93cea3b56ca7\") " pod="openshift-console/console-f9d7485db-md87h" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.572945 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2cf85f90-a707-4cbf-9cea-472b1109692d-serving-cert\") pod \"controller-manager-879f6c89f-df648\" (UID: \"2cf85f90-a707-4cbf-9cea-472b1109692d\") " pod="openshift-controller-manager/controller-manager-879f6c89f-df648" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.572989 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: 
\"kubernetes.io/configmap/b495236a-11fa-48fb-9361-3c02fe062e4b-audit-policies\") pod \"oauth-openshift-558db77b4-r8tph\" (UID: \"b495236a-11fa-48fb-9361-3c02fe062e4b\") " pod="openshift-authentication/oauth-openshift-558db77b4-r8tph" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.574587 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/dff327ac-2d87-47ce-ae92-f6e001b9d876-audit-policies\") pod \"apiserver-7bbb656c7d-vxqtl\" (UID: \"dff327ac-2d87-47ce-ae92-f6e001b9d876\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vxqtl" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.574681 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/b495236a-11fa-48fb-9361-3c02fe062e4b-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-r8tph\" (UID: \"b495236a-11fa-48fb-9361-3c02fe062e4b\") " pod="openshift-authentication/oauth-openshift-558db77b4-r8tph" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.574717 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/503191a0-1fb3-4b1e-84f1-ac3d702f686e-image-import-ca\") pod \"apiserver-76f77b778f-tdl79\" (UID: \"503191a0-1fb3-4b1e-84f1-ac3d702f686e\") " pod="openshift-apiserver/apiserver-76f77b778f-tdl79" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.574749 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9vdjq\" (UniqueName: \"kubernetes.io/projected/af4abbd6-d1b9-411d-9128-cc5b74a93eb5-kube-api-access-9vdjq\") pod \"downloads-7954f5f757-gf4lh\" (UID: \"af4abbd6-d1b9-411d-9128-cc5b74a93eb5\") " pod="openshift-console/downloads-7954f5f757-gf4lh" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.574788 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/2cf85f90-a707-4cbf-9cea-472b1109692d-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-df648\" (UID: \"2cf85f90-a707-4cbf-9cea-472b1109692d\") " pod="openshift-controller-manager/controller-manager-879f6c89f-df648" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.574819 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vwqfj\" (UniqueName: \"kubernetes.io/projected/77e50cc1-ad3e-4d41-ac6a-0bf534f94b8c-kube-api-access-vwqfj\") pod \"machine-approver-56656f9798-mpfz6\" (UID: \"77e50cc1-ad3e-4d41-ac6a-0bf534f94b8c\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-mpfz6" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.574851 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/dff327ac-2d87-47ce-ae92-f6e001b9d876-serving-cert\") pod \"apiserver-7bbb656c7d-vxqtl\" (UID: \"dff327ac-2d87-47ce-ae92-f6e001b9d876\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vxqtl" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.575233 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/dff327ac-2d87-47ce-ae92-f6e001b9d876-audit-dir\") pod \"apiserver-7bbb656c7d-vxqtl\" (UID: \"dff327ac-2d87-47ce-ae92-f6e001b9d876\") " 
pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vxqtl" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.575408 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4a7a5ec3-bbea-4979-a00d-25961a1fd8f6-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-vgtf6\" (UID: \"4a7a5ec3-bbea-4979-a00d-25961a1fd8f6\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-vgtf6" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.575561 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2cf85f90-a707-4cbf-9cea-472b1109692d-config\") pod \"controller-manager-879f6c89f-df648\" (UID: \"2cf85f90-a707-4cbf-9cea-472b1109692d\") " pod="openshift-controller-manager/controller-manager-879f6c89f-df648" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.575680 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/b495236a-11fa-48fb-9361-3c02fe062e4b-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-r8tph\" (UID: \"b495236a-11fa-48fb-9361-3c02fe062e4b\") " pod="openshift-authentication/oauth-openshift-558db77b4-r8tph" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.575723 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/94e2e017-59cc-4809-8244-a100190f35a9-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-xhwpp\" (UID: \"94e2e017-59cc-4809-8244-a100190f35a9\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xhwpp" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.576036 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/b495236a-11fa-48fb-9361-3c02fe062e4b-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-r8tph\" (UID: \"b495236a-11fa-48fb-9361-3c02fe062e4b\") " pod="openshift-authentication/oauth-openshift-558db77b4-r8tph" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.576143 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/dff327ac-2d87-47ce-ae92-f6e001b9d876-audit-dir\") pod \"apiserver-7bbb656c7d-vxqtl\" (UID: \"dff327ac-2d87-47ce-ae92-f6e001b9d876\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vxqtl" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.576391 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/b495236a-11fa-48fb-9361-3c02fe062e4b-audit-dir\") pod \"oauth-openshift-558db77b4-r8tph\" (UID: \"b495236a-11fa-48fb-9361-3c02fe062e4b\") " pod="openshift-authentication/oauth-openshift-558db77b4-r8tph" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.576448 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/53bbfcae-905c-4cb8-883d-e027fe0939b5-config\") pod \"console-operator-58897d9998-hlvdp\" (UID: \"53bbfcae-905c-4cb8-883d-e027fe0939b5\") " 
pod="openshift-console-operator/console-operator-58897d9998-hlvdp" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.576560 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/dff327ac-2d87-47ce-ae92-f6e001b9d876-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-vxqtl\" (UID: \"dff327ac-2d87-47ce-ae92-f6e001b9d876\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vxqtl" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.576632 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/b495236a-11fa-48fb-9361-3c02fe062e4b-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-r8tph\" (UID: \"b495236a-11fa-48fb-9361-3c02fe062e4b\") " pod="openshift-authentication/oauth-openshift-558db77b4-r8tph" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.576661 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/503191a0-1fb3-4b1e-84f1-ac3d702f686e-serving-cert\") pod \"apiserver-76f77b778f-tdl79\" (UID: \"503191a0-1fb3-4b1e-84f1-ac3d702f686e\") " pod="openshift-apiserver/apiserver-76f77b778f-tdl79" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.576687 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/503191a0-1fb3-4b1e-84f1-ac3d702f686e-encryption-config\") pod \"apiserver-76f77b778f-tdl79\" (UID: \"503191a0-1fb3-4b1e-84f1-ac3d702f686e\") " pod="openshift-apiserver/apiserver-76f77b778f-tdl79" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.576740 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4a7a5ec3-bbea-4979-a00d-25961a1fd8f6-serving-cert\") pod \"authentication-operator-69f744f599-vgtf6\" (UID: \"4a7a5ec3-bbea-4979-a00d-25961a1fd8f6\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-vgtf6" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.576766 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/53bbfcae-905c-4cb8-883d-e027fe0939b5-serving-cert\") pod \"console-operator-58897d9998-hlvdp\" (UID: \"53bbfcae-905c-4cb8-883d-e027fe0939b5\") " pod="openshift-console-operator/console-operator-58897d9998-hlvdp" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.576792 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/503191a0-1fb3-4b1e-84f1-ac3d702f686e-node-pullsecrets\") pod \"apiserver-76f77b778f-tdl79\" (UID: \"503191a0-1fb3-4b1e-84f1-ac3d702f686e\") " pod="openshift-apiserver/apiserver-76f77b778f-tdl79" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.576822 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f17dee56-592e-45e7-8c4f-80854757d254-serving-cert\") pod \"route-controller-manager-6576b87f9c-cljtg\" (UID: \"f17dee56-592e-45e7-8c4f-80854757d254\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-cljtg" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.576850 4730 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/3970c1fd-c1a9-40a4-a2b5-276df544f222-available-featuregates\") pod \"openshift-config-operator-7777fb866f-75wjk\" (UID: \"3970c1fd-c1a9-40a4-a2b5-276df544f222\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-75wjk" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.576872 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t5b9j\" (UniqueName: \"kubernetes.io/projected/3970c1fd-c1a9-40a4-a2b5-276df544f222-kube-api-access-t5b9j\") pod \"openshift-config-operator-7777fb866f-75wjk\" (UID: \"3970c1fd-c1a9-40a4-a2b5-276df544f222\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-75wjk" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.576894 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pxnwp\" (UniqueName: \"kubernetes.io/projected/94e2e017-59cc-4809-8244-a100190f35a9-kube-api-access-pxnwp\") pod \"openshift-controller-manager-operator-756b6f6bc6-xhwpp\" (UID: \"94e2e017-59cc-4809-8244-a100190f35a9\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xhwpp" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.576927 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rbb94\" (UniqueName: \"kubernetes.io/projected/f17dee56-592e-45e7-8c4f-80854757d254-kube-api-access-rbb94\") pod \"route-controller-manager-6576b87f9c-cljtg\" (UID: \"f17dee56-592e-45e7-8c4f-80854757d254\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-cljtg" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.576968 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/fc0f4b15-4b92-4799-b77f-8735c85b2203-metrics-tls\") pod \"ingress-operator-5b745b69d9-n9qkg\" (UID: \"fc0f4b15-4b92-4799-b77f-8735c85b2203\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-n9qkg" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.576991 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/dff327ac-2d87-47ce-ae92-f6e001b9d876-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-vxqtl\" (UID: \"dff327ac-2d87-47ce-ae92-f6e001b9d876\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vxqtl" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.577021 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9rw9b\" (UniqueName: \"kubernetes.io/projected/e0967d6a-234e-4f7b-b4ec-073e1822fec1-kube-api-access-9rw9b\") pod \"cluster-image-registry-operator-dc59b4c8b-zzmfv\" (UID: \"e0967d6a-234e-4f7b-b4ec-073e1822fec1\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-zzmfv" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.577045 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/b495236a-11fa-48fb-9361-3c02fe062e4b-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-r8tph\" (UID: \"b495236a-11fa-48fb-9361-3c02fe062e4b\") " 
pod="openshift-authentication/oauth-openshift-558db77b4-r8tph" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.577032 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/dff327ac-2d87-47ce-ae92-f6e001b9d876-audit-policies\") pod \"apiserver-7bbb656c7d-vxqtl\" (UID: \"dff327ac-2d87-47ce-ae92-f6e001b9d876\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vxqtl" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.577069 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p7sp2\" (UniqueName: \"kubernetes.io/projected/9285876c-50d7-4dc8-a57d-d7a6db5cf3ce-kube-api-access-p7sp2\") pod \"catalog-operator-68c6474976-7sbbv\" (UID: \"9285876c-50d7-4dc8-a57d-d7a6db5cf3ce\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-7sbbv" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.577104 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/dff327ac-2d87-47ce-ae92-f6e001b9d876-etcd-client\") pod \"apiserver-7bbb656c7d-vxqtl\" (UID: \"dff327ac-2d87-47ce-ae92-f6e001b9d876\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vxqtl" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.577127 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/e0967d6a-234e-4f7b-b4ec-073e1822fec1-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-zzmfv\" (UID: \"e0967d6a-234e-4f7b-b4ec-073e1822fec1\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-zzmfv" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.577155 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4a7a5ec3-bbea-4979-a00d-25961a1fd8f6-service-ca-bundle\") pod \"authentication-operator-69f744f599-vgtf6\" (UID: \"4a7a5ec3-bbea-4979-a00d-25961a1fd8f6\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-vgtf6" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.577178 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b495236a-11fa-48fb-9361-3c02fe062e4b-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-r8tph\" (UID: \"b495236a-11fa-48fb-9361-3c02fe062e4b\") " pod="openshift-authentication/oauth-openshift-558db77b4-r8tph" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.577199 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7shjk\" (UniqueName: \"kubernetes.io/projected/503191a0-1fb3-4b1e-84f1-ac3d702f686e-kube-api-access-7shjk\") pod \"apiserver-76f77b778f-tdl79\" (UID: \"503191a0-1fb3-4b1e-84f1-ac3d702f686e\") " pod="openshift-apiserver/apiserver-76f77b778f-tdl79" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.577511 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/b495236a-11fa-48fb-9361-3c02fe062e4b-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-r8tph\" (UID: \"b495236a-11fa-48fb-9361-3c02fe062e4b\") " 
pod="openshift-authentication/oauth-openshift-558db77b4-r8tph" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.577597 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3970c1fd-c1a9-40a4-a2b5-276df544f222-serving-cert\") pod \"openshift-config-operator-7777fb866f-75wjk\" (UID: \"3970c1fd-c1a9-40a4-a2b5-276df544f222\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-75wjk" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.577671 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/b495236a-11fa-48fb-9361-3c02fe062e4b-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-r8tph\" (UID: \"b495236a-11fa-48fb-9361-3c02fe062e4b\") " pod="openshift-authentication/oauth-openshift-558db77b4-r8tph" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.577918 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/9285876c-50d7-4dc8-a57d-d7a6db5cf3ce-srv-cert\") pod \"catalog-operator-68c6474976-7sbbv\" (UID: \"9285876c-50d7-4dc8-a57d-d7a6db5cf3ce\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-7sbbv" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.578068 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/e0967d6a-234e-4f7b-b4ec-073e1822fec1-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-zzmfv\" (UID: \"e0967d6a-234e-4f7b-b4ec-073e1822fec1\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-zzmfv" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.578099 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/77e50cc1-ad3e-4d41-ac6a-0bf534f94b8c-machine-approver-tls\") pod \"machine-approver-56656f9798-mpfz6\" (UID: \"77e50cc1-ad3e-4d41-ac6a-0bf534f94b8c\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-mpfz6" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.578166 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/503191a0-1fb3-4b1e-84f1-ac3d702f686e-config\") pod \"apiserver-76f77b778f-tdl79\" (UID: \"503191a0-1fb3-4b1e-84f1-ac3d702f686e\") " pod="openshift-apiserver/apiserver-76f77b778f-tdl79" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.578197 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/77e50cc1-ad3e-4d41-ac6a-0bf534f94b8c-auth-proxy-config\") pod \"machine-approver-56656f9798-mpfz6\" (UID: \"77e50cc1-ad3e-4d41-ac6a-0bf534f94b8c\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-mpfz6" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.578230 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/3c83ea01-beb2-4b6c-b67b-93cea3b56ca7-oauth-serving-cert\") pod \"console-f9d7485db-md87h\" (UID: \"3c83ea01-beb2-4b6c-b67b-93cea3b56ca7\") " pod="openshift-console/console-f9d7485db-md87h" Sep 30 09:51:35 
crc kubenswrapper[4730]: I0930 09:51:35.578313 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/503191a0-1fb3-4b1e-84f1-ac3d702f686e-trusted-ca-bundle\") pod \"apiserver-76f77b778f-tdl79\" (UID: \"503191a0-1fb3-4b1e-84f1-ac3d702f686e\") " pod="openshift-apiserver/apiserver-76f77b778f-tdl79" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.578349 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h5vf7\" (UniqueName: \"kubernetes.io/projected/2cf85f90-a707-4cbf-9cea-472b1109692d-kube-api-access-h5vf7\") pod \"controller-manager-879f6c89f-df648\" (UID: \"2cf85f90-a707-4cbf-9cea-472b1109692d\") " pod="openshift-controller-manager/controller-manager-879f6c89f-df648" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.578378 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/fc0f4b15-4b92-4799-b77f-8735c85b2203-trusted-ca\") pod \"ingress-operator-5b745b69d9-n9qkg\" (UID: \"fc0f4b15-4b92-4799-b77f-8735c85b2203\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-n9qkg" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.578400 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ctkrp\" (UniqueName: \"kubernetes.io/projected/789ee928-afa8-424d-8810-6a04b2a7d5d6-kube-api-access-ctkrp\") pod \"machine-api-operator-5694c8668f-r86xc\" (UID: \"789ee928-afa8-424d-8810-6a04b2a7d5d6\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-r86xc" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.578420 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/3c83ea01-beb2-4b6c-b67b-93cea3b56ca7-service-ca\") pod \"console-f9d7485db-md87h\" (UID: \"3c83ea01-beb2-4b6c-b67b-93cea3b56ca7\") " pod="openshift-console/console-f9d7485db-md87h" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.578440 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/3c83ea01-beb2-4b6c-b67b-93cea3b56ca7-console-serving-cert\") pod \"console-f9d7485db-md87h\" (UID: \"3c83ea01-beb2-4b6c-b67b-93cea3b56ca7\") " pod="openshift-console/console-f9d7485db-md87h" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.578730 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/584babfe-a3c2-49da-97ac-c3a9fc90f102-config\") pod \"openshift-apiserver-operator-796bbdcf4f-bkbql\" (UID: \"584babfe-a3c2-49da-97ac-c3a9fc90f102\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-bkbql" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.578823 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/dff327ac-2d87-47ce-ae92-f6e001b9d876-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-vxqtl\" (UID: \"dff327ac-2d87-47ce-ae92-f6e001b9d876\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vxqtl" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.578815 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/dff327ac-2d87-47ce-ae92-f6e001b9d876-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-vxqtl\" (UID: \"dff327ac-2d87-47ce-ae92-f6e001b9d876\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vxqtl" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.578845 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/789ee928-afa8-424d-8810-6a04b2a7d5d6-config\") pod \"machine-api-operator-5694c8668f-r86xc\" (UID: \"789ee928-afa8-424d-8810-6a04b2a7d5d6\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-r86xc" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.579001 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4a7a5ec3-bbea-4979-a00d-25961a1fd8f6-config\") pod \"authentication-operator-69f744f599-vgtf6\" (UID: \"4a7a5ec3-bbea-4979-a00d-25961a1fd8f6\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-vgtf6" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.579105 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f17dee56-592e-45e7-8c4f-80854757d254-config\") pod \"route-controller-manager-6576b87f9c-cljtg\" (UID: \"f17dee56-592e-45e7-8c4f-80854757d254\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-cljtg" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.579180 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9nr77\" (UniqueName: \"kubernetes.io/projected/53bbfcae-905c-4cb8-883d-e027fe0939b5-kube-api-access-9nr77\") pod \"console-operator-58897d9998-hlvdp\" (UID: \"53bbfcae-905c-4cb8-883d-e027fe0939b5\") " pod="openshift-console-operator/console-operator-58897d9998-hlvdp" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.579320 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/503191a0-1fb3-4b1e-84f1-ac3d702f686e-etcd-serving-ca\") pod \"apiserver-76f77b778f-tdl79\" (UID: \"503191a0-1fb3-4b1e-84f1-ac3d702f686e\") " pod="openshift-apiserver/apiserver-76f77b778f-tdl79" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.579354 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/77e50cc1-ad3e-4d41-ac6a-0bf534f94b8c-config\") pod \"machine-approver-56656f9798-mpfz6\" (UID: \"77e50cc1-ad3e-4d41-ac6a-0bf534f94b8c\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-mpfz6" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.579390 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/3c83ea01-beb2-4b6c-b67b-93cea3b56ca7-console-config\") pod \"console-f9d7485db-md87h\" (UID: \"3c83ea01-beb2-4b6c-b67b-93cea3b56ca7\") " pod="openshift-console/console-f9d7485db-md87h" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.579466 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/b495236a-11fa-48fb-9361-3c02fe062e4b-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-r8tph\" (UID: 
\"b495236a-11fa-48fb-9361-3c02fe062e4b\") " pod="openshift-authentication/oauth-openshift-558db77b4-r8tph" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.579507 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/503191a0-1fb3-4b1e-84f1-ac3d702f686e-etcd-client\") pod \"apiserver-76f77b778f-tdl79\" (UID: \"503191a0-1fb3-4b1e-84f1-ac3d702f686e\") " pod="openshift-apiserver/apiserver-76f77b778f-tdl79" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.579463 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/77e50cc1-ad3e-4d41-ac6a-0bf534f94b8c-auth-proxy-config\") pod \"machine-approver-56656f9798-mpfz6\" (UID: \"77e50cc1-ad3e-4d41-ac6a-0bf534f94b8c\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-mpfz6" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.579827 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gfdtj\" (UniqueName: \"kubernetes.io/projected/fc0f4b15-4b92-4799-b77f-8735c85b2203-kube-api-access-gfdtj\") pod \"ingress-operator-5b745b69d9-n9qkg\" (UID: \"fc0f4b15-4b92-4799-b77f-8735c85b2203\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-n9qkg" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.579950 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e0967d6a-234e-4f7b-b4ec-073e1822fec1-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-zzmfv\" (UID: \"e0967d6a-234e-4f7b-b4ec-073e1822fec1\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-zzmfv" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.580032 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/2cf85f90-a707-4cbf-9cea-472b1109692d-client-ca\") pod \"controller-manager-879f6c89f-df648\" (UID: \"2cf85f90-a707-4cbf-9cea-472b1109692d\") " pod="openshift-controller-manager/controller-manager-879f6c89f-df648" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.580113 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/53bbfcae-905c-4cb8-883d-e027fe0939b5-trusted-ca\") pod \"console-operator-58897d9998-hlvdp\" (UID: \"53bbfcae-905c-4cb8-883d-e027fe0939b5\") " pod="openshift-console-operator/console-operator-58897d9998-hlvdp" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.580229 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/fc0f4b15-4b92-4799-b77f-8735c85b2203-trusted-ca\") pod \"ingress-operator-5b745b69d9-n9qkg\" (UID: \"fc0f4b15-4b92-4799-b77f-8735c85b2203\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-n9qkg" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.580293 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/b495236a-11fa-48fb-9361-3c02fe062e4b-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-r8tph\" (UID: \"b495236a-11fa-48fb-9361-3c02fe062e4b\") " pod="openshift-authentication/oauth-openshift-558db77b4-r8tph" Sep 30 09:51:35 crc 
kubenswrapper[4730]: I0930 09:51:35.580390 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qcz94\" (UniqueName: \"kubernetes.io/projected/584babfe-a3c2-49da-97ac-c3a9fc90f102-kube-api-access-qcz94\") pod \"openshift-apiserver-operator-796bbdcf4f-bkbql\" (UID: \"584babfe-a3c2-49da-97ac-c3a9fc90f102\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-bkbql" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.580467 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rjzd6\" (UniqueName: \"kubernetes.io/projected/1445a993-db8f-4cdb-a89c-9f45a3ee0b4d-kube-api-access-rjzd6\") pod \"cluster-samples-operator-665b6dd947-brcnh\" (UID: \"1445a993-db8f-4cdb-a89c-9f45a3ee0b4d\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-brcnh" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.580549 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/fc0f4b15-4b92-4799-b77f-8735c85b2203-bound-sa-token\") pod \"ingress-operator-5b745b69d9-n9qkg\" (UID: \"fc0f4b15-4b92-4799-b77f-8735c85b2203\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-n9qkg" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.580701 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/b495236a-11fa-48fb-9361-3c02fe062e4b-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-r8tph\" (UID: \"b495236a-11fa-48fb-9361-3c02fe062e4b\") " pod="openshift-authentication/oauth-openshift-558db77b4-r8tph" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.580740 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/503191a0-1fb3-4b1e-84f1-ac3d702f686e-audit\") pod \"apiserver-76f77b778f-tdl79\" (UID: \"503191a0-1fb3-4b1e-84f1-ac3d702f686e\") " pod="openshift-apiserver/apiserver-76f77b778f-tdl79" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.580741 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/77e50cc1-ad3e-4d41-ac6a-0bf534f94b8c-config\") pod \"machine-approver-56656f9798-mpfz6\" (UID: \"77e50cc1-ad3e-4d41-ac6a-0bf534f94b8c\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-mpfz6" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.580802 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f17dee56-592e-45e7-8c4f-80854757d254-config\") pod \"route-controller-manager-6576b87f9c-cljtg\" (UID: \"f17dee56-592e-45e7-8c4f-80854757d254\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-cljtg" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.580863 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f17dee56-592e-45e7-8c4f-80854757d254-client-ca\") pod \"route-controller-manager-6576b87f9c-cljtg\" (UID: \"f17dee56-592e-45e7-8c4f-80854757d254\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-cljtg" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.580892 4730 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/9285876c-50d7-4dc8-a57d-d7a6db5cf3ce-profile-collector-cert\") pod \"catalog-operator-68c6474976-7sbbv\" (UID: \"9285876c-50d7-4dc8-a57d-d7a6db5cf3ce\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-7sbbv" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.582695 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wc5g6\" (UniqueName: \"kubernetes.io/projected/dff327ac-2d87-47ce-ae92-f6e001b9d876-kube-api-access-wc5g6\") pod \"apiserver-7bbb656c7d-vxqtl\" (UID: \"dff327ac-2d87-47ce-ae92-f6e001b9d876\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vxqtl" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.582750 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/1445a993-db8f-4cdb-a89c-9f45a3ee0b4d-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-brcnh\" (UID: \"1445a993-db8f-4cdb-a89c-9f45a3ee0b4d\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-brcnh" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.582780 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/789ee928-afa8-424d-8810-6a04b2a7d5d6-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-r86xc\" (UID: \"789ee928-afa8-424d-8810-6a04b2a7d5d6\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-r86xc" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.582796 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b2922\" (UniqueName: \"kubernetes.io/projected/b495236a-11fa-48fb-9361-3c02fe062e4b-kube-api-access-b2922\") pod \"oauth-openshift-558db77b4-r8tph\" (UID: \"b495236a-11fa-48fb-9361-3c02fe062e4b\") " pod="openshift-authentication/oauth-openshift-558db77b4-r8tph" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.582828 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/94e2e017-59cc-4809-8244-a100190f35a9-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-xhwpp\" (UID: \"94e2e017-59cc-4809-8244-a100190f35a9\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xhwpp" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.582859 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/789ee928-afa8-424d-8810-6a04b2a7d5d6-images\") pod \"machine-api-operator-5694c8668f-r86xc\" (UID: \"789ee928-afa8-424d-8810-6a04b2a7d5d6\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-r86xc" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.582879 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r5lnm\" (UniqueName: \"kubernetes.io/projected/3c83ea01-beb2-4b6c-b67b-93cea3b56ca7-kube-api-access-r5lnm\") pod \"console-f9d7485db-md87h\" (UID: \"3c83ea01-beb2-4b6c-b67b-93cea3b56ca7\") " pod="openshift-console/console-f9d7485db-md87h" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 
09:51:35.583169 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/dff327ac-2d87-47ce-ae92-f6e001b9d876-encryption-config\") pod \"apiserver-7bbb656c7d-vxqtl\" (UID: \"dff327ac-2d87-47ce-ae92-f6e001b9d876\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vxqtl" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.583789 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f17dee56-592e-45e7-8c4f-80854757d254-client-ca\") pod \"route-controller-manager-6576b87f9c-cljtg\" (UID: \"f17dee56-592e-45e7-8c4f-80854757d254\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-cljtg" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.584026 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/77e50cc1-ad3e-4d41-ac6a-0bf534f94b8c-machine-approver-tls\") pod \"machine-approver-56656f9798-mpfz6\" (UID: \"77e50cc1-ad3e-4d41-ac6a-0bf534f94b8c\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-mpfz6" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.585118 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.585572 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f17dee56-592e-45e7-8c4f-80854757d254-serving-cert\") pod \"route-controller-manager-6576b87f9c-cljtg\" (UID: \"f17dee56-592e-45e7-8c4f-80854757d254\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-cljtg" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.585659 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-vgdj9"] Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.586083 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/dff327ac-2d87-47ce-ae92-f6e001b9d876-serving-cert\") pod \"apiserver-7bbb656c7d-vxqtl\" (UID: \"dff327ac-2d87-47ce-ae92-f6e001b9d876\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vxqtl" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.587256 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-vgdj9" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.587455 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-md87h"] Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.589001 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/fc0f4b15-4b92-4799-b77f-8735c85b2203-metrics-tls\") pod \"ingress-operator-5b745b69d9-n9qkg\" (UID: \"fc0f4b15-4b92-4799-b77f-8735c85b2203\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-n9qkg" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.589243 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/dff327ac-2d87-47ce-ae92-f6e001b9d876-etcd-client\") pod \"apiserver-7bbb656c7d-vxqtl\" (UID: \"dff327ac-2d87-47ce-ae92-f6e001b9d876\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vxqtl" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.597785 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-vgtf6"] Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.599893 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29320425-gwwdz"] Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.600630 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29320425-gwwdz" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.601581 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-vxqtl"] Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.603155 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-tdl79"] Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.605316 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.605601 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-brcnh"] Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.606357 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-cljtg"] Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.607829 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-r86xc"] Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.609353 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-zzmfv"] Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.610940 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-gf4lh"] Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.612362 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-df648"] Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.613654 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-cxj9q"] Sep 30 09:51:35 
crc kubenswrapper[4730]: I0930 09:51:35.615248 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-75wjk"] Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.616632 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-r8tph"] Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.619325 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-2psv2"] Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.620304 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-hlvdp"] Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.620338 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-b829k"] Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.620463 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-2psv2" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.622867 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-f4hwj"] Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.623946 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xhwpp"] Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.624845 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.624970 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-bkbql"] Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.625987 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-wqstr"] Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.628125 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-9x2qr"] Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.629289 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-cjsxd"] Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.630341 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-xdzzr"] Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.631302 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-sfwd4"] Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.632321 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-xbf9b"] Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.633580 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-v6vtc"] Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.635507 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-n9qkg"] Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.636732 4730 kubelet.go:2428] "SyncLoop UPDATE" 
source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-7sbbv"] Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.637752 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-cffv2"] Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.638827 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-vgdj9"] Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.639900 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-27l2q"] Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.640928 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-r8pgh"] Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.642460 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-jj6x6"] Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.643055 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-r8pgh" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.645151 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-9ckzv"] Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.645191 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-2psv2"] Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.645237 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.645288 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-jj6x6" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.645538 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-nt7nq"] Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.648924 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-jkmzt"] Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.650130 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29320425-gwwdz"] Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.651263 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-ssdbb"] Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.652380 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-v86bf"] Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.653732 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-jj6x6"] Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.654902 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-r8pgh"] Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.656027 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-cv6jk"] Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.656951 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-cv6jk" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.675905 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.683867 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3970c1fd-c1a9-40a4-a2b5-276df544f222-serving-cert\") pod \"openshift-config-operator-7777fb866f-75wjk\" (UID: \"3970c1fd-c1a9-40a4-a2b5-276df544f222\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-75wjk" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.683908 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/b495236a-11fa-48fb-9361-3c02fe062e4b-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-r8tph\" (UID: \"b495236a-11fa-48fb-9361-3c02fe062e4b\") " pod="openshift-authentication/oauth-openshift-558db77b4-r8tph" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.683935 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/9285876c-50d7-4dc8-a57d-d7a6db5cf3ce-srv-cert\") pod \"catalog-operator-68c6474976-7sbbv\" (UID: \"9285876c-50d7-4dc8-a57d-d7a6db5cf3ce\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-7sbbv" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.683953 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/b495236a-11fa-48fb-9361-3c02fe062e4b-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-r8tph\" (UID: \"b495236a-11fa-48fb-9361-3c02fe062e4b\") " pod="openshift-authentication/oauth-openshift-558db77b4-r8tph" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.683971 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/e0967d6a-234e-4f7b-b4ec-073e1822fec1-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-zzmfv\" (UID: \"e0967d6a-234e-4f7b-b4ec-073e1822fec1\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-zzmfv" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.683998 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/503191a0-1fb3-4b1e-84f1-ac3d702f686e-config\") pod \"apiserver-76f77b778f-tdl79\" (UID: \"503191a0-1fb3-4b1e-84f1-ac3d702f686e\") " pod="openshift-apiserver/apiserver-76f77b778f-tdl79" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.684018 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/3c83ea01-beb2-4b6c-b67b-93cea3b56ca7-oauth-serving-cert\") pod \"console-f9d7485db-md87h\" (UID: \"3c83ea01-beb2-4b6c-b67b-93cea3b56ca7\") " pod="openshift-console/console-f9d7485db-md87h" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.684035 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/503191a0-1fb3-4b1e-84f1-ac3d702f686e-trusted-ca-bundle\") pod \"apiserver-76f77b778f-tdl79\" 
(UID: \"503191a0-1fb3-4b1e-84f1-ac3d702f686e\") " pod="openshift-apiserver/apiserver-76f77b778f-tdl79" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.684055 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ctkrp\" (UniqueName: \"kubernetes.io/projected/789ee928-afa8-424d-8810-6a04b2a7d5d6-kube-api-access-ctkrp\") pod \"machine-api-operator-5694c8668f-r86xc\" (UID: \"789ee928-afa8-424d-8810-6a04b2a7d5d6\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-r86xc" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.684074 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/3c83ea01-beb2-4b6c-b67b-93cea3b56ca7-service-ca\") pod \"console-f9d7485db-md87h\" (UID: \"3c83ea01-beb2-4b6c-b67b-93cea3b56ca7\") " pod="openshift-console/console-f9d7485db-md87h" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.684117 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h5vf7\" (UniqueName: \"kubernetes.io/projected/2cf85f90-a707-4cbf-9cea-472b1109692d-kube-api-access-h5vf7\") pod \"controller-manager-879f6c89f-df648\" (UID: \"2cf85f90-a707-4cbf-9cea-472b1109692d\") " pod="openshift-controller-manager/controller-manager-879f6c89f-df648" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.684143 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/789ee928-afa8-424d-8810-6a04b2a7d5d6-config\") pod \"machine-api-operator-5694c8668f-r86xc\" (UID: \"789ee928-afa8-424d-8810-6a04b2a7d5d6\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-r86xc" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.684163 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4a7a5ec3-bbea-4979-a00d-25961a1fd8f6-config\") pod \"authentication-operator-69f744f599-vgtf6\" (UID: \"4a7a5ec3-bbea-4979-a00d-25961a1fd8f6\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-vgtf6" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.684180 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/3c83ea01-beb2-4b6c-b67b-93cea3b56ca7-console-serving-cert\") pod \"console-f9d7485db-md87h\" (UID: \"3c83ea01-beb2-4b6c-b67b-93cea3b56ca7\") " pod="openshift-console/console-f9d7485db-md87h" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.684197 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/584babfe-a3c2-49da-97ac-c3a9fc90f102-config\") pod \"openshift-apiserver-operator-796bbdcf4f-bkbql\" (UID: \"584babfe-a3c2-49da-97ac-c3a9fc90f102\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-bkbql" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.684216 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9nr77\" (UniqueName: \"kubernetes.io/projected/53bbfcae-905c-4cb8-883d-e027fe0939b5-kube-api-access-9nr77\") pod \"console-operator-58897d9998-hlvdp\" (UID: \"53bbfcae-905c-4cb8-883d-e027fe0939b5\") " pod="openshift-console-operator/console-operator-58897d9998-hlvdp" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.684235 4730 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/503191a0-1fb3-4b1e-84f1-ac3d702f686e-etcd-serving-ca\") pod \"apiserver-76f77b778f-tdl79\" (UID: \"503191a0-1fb3-4b1e-84f1-ac3d702f686e\") " pod="openshift-apiserver/apiserver-76f77b778f-tdl79" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.684256 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/3c83ea01-beb2-4b6c-b67b-93cea3b56ca7-console-config\") pod \"console-f9d7485db-md87h\" (UID: \"3c83ea01-beb2-4b6c-b67b-93cea3b56ca7\") " pod="openshift-console/console-f9d7485db-md87h" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.684274 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/b495236a-11fa-48fb-9361-3c02fe062e4b-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-r8tph\" (UID: \"b495236a-11fa-48fb-9361-3c02fe062e4b\") " pod="openshift-authentication/oauth-openshift-558db77b4-r8tph" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.684294 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/503191a0-1fb3-4b1e-84f1-ac3d702f686e-etcd-client\") pod \"apiserver-76f77b778f-tdl79\" (UID: \"503191a0-1fb3-4b1e-84f1-ac3d702f686e\") " pod="openshift-apiserver/apiserver-76f77b778f-tdl79" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.684321 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e0967d6a-234e-4f7b-b4ec-073e1822fec1-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-zzmfv\" (UID: \"e0967d6a-234e-4f7b-b4ec-073e1822fec1\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-zzmfv" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.684339 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/2cf85f90-a707-4cbf-9cea-472b1109692d-client-ca\") pod \"controller-manager-879f6c89f-df648\" (UID: \"2cf85f90-a707-4cbf-9cea-472b1109692d\") " pod="openshift-controller-manager/controller-manager-879f6c89f-df648" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.684356 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/53bbfcae-905c-4cb8-883d-e027fe0939b5-trusted-ca\") pod \"console-operator-58897d9998-hlvdp\" (UID: \"53bbfcae-905c-4cb8-883d-e027fe0939b5\") " pod="openshift-console-operator/console-operator-58897d9998-hlvdp" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.684375 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/b495236a-11fa-48fb-9361-3c02fe062e4b-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-r8tph\" (UID: \"b495236a-11fa-48fb-9361-3c02fe062e4b\") " pod="openshift-authentication/oauth-openshift-558db77b4-r8tph" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.684392 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qcz94\" (UniqueName: \"kubernetes.io/projected/584babfe-a3c2-49da-97ac-c3a9fc90f102-kube-api-access-qcz94\") pod \"openshift-apiserver-operator-796bbdcf4f-bkbql\" (UID: \"584babfe-a3c2-49da-97ac-c3a9fc90f102\") " 
pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-bkbql" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.684411 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/b495236a-11fa-48fb-9361-3c02fe062e4b-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-r8tph\" (UID: \"b495236a-11fa-48fb-9361-3c02fe062e4b\") " pod="openshift-authentication/oauth-openshift-558db77b4-r8tph" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.684428 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/503191a0-1fb3-4b1e-84f1-ac3d702f686e-audit\") pod \"apiserver-76f77b778f-tdl79\" (UID: \"503191a0-1fb3-4b1e-84f1-ac3d702f686e\") " pod="openshift-apiserver/apiserver-76f77b778f-tdl79" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.684446 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rjzd6\" (UniqueName: \"kubernetes.io/projected/1445a993-db8f-4cdb-a89c-9f45a3ee0b4d-kube-api-access-rjzd6\") pod \"cluster-samples-operator-665b6dd947-brcnh\" (UID: \"1445a993-db8f-4cdb-a89c-9f45a3ee0b4d\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-brcnh" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.684486 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/1445a993-db8f-4cdb-a89c-9f45a3ee0b4d-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-brcnh\" (UID: \"1445a993-db8f-4cdb-a89c-9f45a3ee0b4d\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-brcnh" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.684503 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/9285876c-50d7-4dc8-a57d-d7a6db5cf3ce-profile-collector-cert\") pod \"catalog-operator-68c6474976-7sbbv\" (UID: \"9285876c-50d7-4dc8-a57d-d7a6db5cf3ce\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-7sbbv" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.684522 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/789ee928-afa8-424d-8810-6a04b2a7d5d6-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-r86xc\" (UID: \"789ee928-afa8-424d-8810-6a04b2a7d5d6\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-r86xc" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.684540 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b2922\" (UniqueName: \"kubernetes.io/projected/b495236a-11fa-48fb-9361-3c02fe062e4b-kube-api-access-b2922\") pod \"oauth-openshift-558db77b4-r8tph\" (UID: \"b495236a-11fa-48fb-9361-3c02fe062e4b\") " pod="openshift-authentication/oauth-openshift-558db77b4-r8tph" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.684557 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/789ee928-afa8-424d-8810-6a04b2a7d5d6-images\") pod \"machine-api-operator-5694c8668f-r86xc\" (UID: \"789ee928-afa8-424d-8810-6a04b2a7d5d6\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-r86xc" Sep 30 09:51:35 crc 
kubenswrapper[4730]: I0930 09:51:35.684574 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r5lnm\" (UniqueName: \"kubernetes.io/projected/3c83ea01-beb2-4b6c-b67b-93cea3b56ca7-kube-api-access-r5lnm\") pod \"console-f9d7485db-md87h\" (UID: \"3c83ea01-beb2-4b6c-b67b-93cea3b56ca7\") " pod="openshift-console/console-f9d7485db-md87h" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.684593 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/94e2e017-59cc-4809-8244-a100190f35a9-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-xhwpp\" (UID: \"94e2e017-59cc-4809-8244-a100190f35a9\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xhwpp" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.685058 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.685360 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/3c83ea01-beb2-4b6c-b67b-93cea3b56ca7-service-ca\") pod \"console-f9d7485db-md87h\" (UID: \"3c83ea01-beb2-4b6c-b67b-93cea3b56ca7\") " pod="openshift-console/console-f9d7485db-md87h" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.685458 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/503191a0-1fb3-4b1e-84f1-ac3d702f686e-config\") pod \"apiserver-76f77b778f-tdl79\" (UID: \"503191a0-1fb3-4b1e-84f1-ac3d702f686e\") " pod="openshift-apiserver/apiserver-76f77b778f-tdl79" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.685572 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/2cf85f90-a707-4cbf-9cea-472b1109692d-client-ca\") pod \"controller-manager-879f6c89f-df648\" (UID: \"2cf85f90-a707-4cbf-9cea-472b1109692d\") " pod="openshift-controller-manager/controller-manager-879f6c89f-df648" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.686004 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/3c83ea01-beb2-4b6c-b67b-93cea3b56ca7-console-oauth-config\") pod \"console-f9d7485db-md87h\" (UID: \"3c83ea01-beb2-4b6c-b67b-93cea3b56ca7\") " pod="openshift-console/console-f9d7485db-md87h" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.686035 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/503191a0-1fb3-4b1e-84f1-ac3d702f686e-audit-dir\") pod \"apiserver-76f77b778f-tdl79\" (UID: \"503191a0-1fb3-4b1e-84f1-ac3d702f686e\") " pod="openshift-apiserver/apiserver-76f77b778f-tdl79" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.686057 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/584babfe-a3c2-49da-97ac-c3a9fc90f102-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-bkbql\" (UID: \"584babfe-a3c2-49da-97ac-c3a9fc90f102\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-bkbql" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.686079 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ltrhn\" 
(UniqueName: \"kubernetes.io/projected/4a7a5ec3-bbea-4979-a00d-25961a1fd8f6-kube-api-access-ltrhn\") pod \"authentication-operator-69f744f599-vgtf6\" (UID: \"4a7a5ec3-bbea-4979-a00d-25961a1fd8f6\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-vgtf6" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.686098 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3c83ea01-beb2-4b6c-b67b-93cea3b56ca7-trusted-ca-bundle\") pod \"console-f9d7485db-md87h\" (UID: \"3c83ea01-beb2-4b6c-b67b-93cea3b56ca7\") " pod="openshift-console/console-f9d7485db-md87h" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.686117 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2cf85f90-a707-4cbf-9cea-472b1109692d-serving-cert\") pod \"controller-manager-879f6c89f-df648\" (UID: \"2cf85f90-a707-4cbf-9cea-472b1109692d\") " pod="openshift-controller-manager/controller-manager-879f6c89f-df648" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.686138 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/b495236a-11fa-48fb-9361-3c02fe062e4b-audit-policies\") pod \"oauth-openshift-558db77b4-r8tph\" (UID: \"b495236a-11fa-48fb-9361-3c02fe062e4b\") " pod="openshift-authentication/oauth-openshift-558db77b4-r8tph" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.686160 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/b495236a-11fa-48fb-9361-3c02fe062e4b-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-r8tph\" (UID: \"b495236a-11fa-48fb-9361-3c02fe062e4b\") " pod="openshift-authentication/oauth-openshift-558db77b4-r8tph" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.686178 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/503191a0-1fb3-4b1e-84f1-ac3d702f686e-image-import-ca\") pod \"apiserver-76f77b778f-tdl79\" (UID: \"503191a0-1fb3-4b1e-84f1-ac3d702f686e\") " pod="openshift-apiserver/apiserver-76f77b778f-tdl79" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.686198 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/2cf85f90-a707-4cbf-9cea-472b1109692d-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-df648\" (UID: \"2cf85f90-a707-4cbf-9cea-472b1109692d\") " pod="openshift-controller-manager/controller-manager-879f6c89f-df648" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.686220 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9vdjq\" (UniqueName: \"kubernetes.io/projected/af4abbd6-d1b9-411d-9128-cc5b74a93eb5-kube-api-access-9vdjq\") pod \"downloads-7954f5f757-gf4lh\" (UID: \"af4abbd6-d1b9-411d-9128-cc5b74a93eb5\") " pod="openshift-console/downloads-7954f5f757-gf4lh" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.686253 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/503191a0-1fb3-4b1e-84f1-ac3d702f686e-trusted-ca-bundle\") pod \"apiserver-76f77b778f-tdl79\" (UID: \"503191a0-1fb3-4b1e-84f1-ac3d702f686e\") " pod="openshift-apiserver/apiserver-76f77b778f-tdl79" Sep 
30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.686260 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4a7a5ec3-bbea-4979-a00d-25961a1fd8f6-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-vgtf6\" (UID: \"4a7a5ec3-bbea-4979-a00d-25961a1fd8f6\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-vgtf6" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.686287 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/503191a0-1fb3-4b1e-84f1-ac3d702f686e-etcd-serving-ca\") pod \"apiserver-76f77b778f-tdl79\" (UID: \"503191a0-1fb3-4b1e-84f1-ac3d702f686e\") " pod="openshift-apiserver/apiserver-76f77b778f-tdl79" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.686299 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2cf85f90-a707-4cbf-9cea-472b1109692d-config\") pod \"controller-manager-879f6c89f-df648\" (UID: \"2cf85f90-a707-4cbf-9cea-472b1109692d\") " pod="openshift-controller-manager/controller-manager-879f6c89f-df648" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.686376 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/b495236a-11fa-48fb-9361-3c02fe062e4b-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-r8tph\" (UID: \"b495236a-11fa-48fb-9361-3c02fe062e4b\") " pod="openshift-authentication/oauth-openshift-558db77b4-r8tph" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.686404 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/94e2e017-59cc-4809-8244-a100190f35a9-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-xhwpp\" (UID: \"94e2e017-59cc-4809-8244-a100190f35a9\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xhwpp" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.686430 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/b495236a-11fa-48fb-9361-3c02fe062e4b-audit-dir\") pod \"oauth-openshift-558db77b4-r8tph\" (UID: \"b495236a-11fa-48fb-9361-3c02fe062e4b\") " pod="openshift-authentication/oauth-openshift-558db77b4-r8tph" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.686451 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/b495236a-11fa-48fb-9361-3c02fe062e4b-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-r8tph\" (UID: \"b495236a-11fa-48fb-9361-3c02fe062e4b\") " pod="openshift-authentication/oauth-openshift-558db77b4-r8tph" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.686475 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/53bbfcae-905c-4cb8-883d-e027fe0939b5-config\") pod \"console-operator-58897d9998-hlvdp\" (UID: \"53bbfcae-905c-4cb8-883d-e027fe0939b5\") " pod="openshift-console-operator/console-operator-58897d9998-hlvdp" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.686497 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/4a7a5ec3-bbea-4979-a00d-25961a1fd8f6-serving-cert\") pod \"authentication-operator-69f744f599-vgtf6\" (UID: \"4a7a5ec3-bbea-4979-a00d-25961a1fd8f6\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-vgtf6" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.686487 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/789ee928-afa8-424d-8810-6a04b2a7d5d6-config\") pod \"machine-api-operator-5694c8668f-r86xc\" (UID: \"789ee928-afa8-424d-8810-6a04b2a7d5d6\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-r86xc" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.686518 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/b495236a-11fa-48fb-9361-3c02fe062e4b-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-r8tph\" (UID: \"b495236a-11fa-48fb-9361-3c02fe062e4b\") " pod="openshift-authentication/oauth-openshift-558db77b4-r8tph" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.686587 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/503191a0-1fb3-4b1e-84f1-ac3d702f686e-serving-cert\") pod \"apiserver-76f77b778f-tdl79\" (UID: \"503191a0-1fb3-4b1e-84f1-ac3d702f686e\") " pod="openshift-apiserver/apiserver-76f77b778f-tdl79" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.686274 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/3c83ea01-beb2-4b6c-b67b-93cea3b56ca7-console-config\") pod \"console-f9d7485db-md87h\" (UID: \"3c83ea01-beb2-4b6c-b67b-93cea3b56ca7\") " pod="openshift-console/console-f9d7485db-md87h" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.686668 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/503191a0-1fb3-4b1e-84f1-ac3d702f686e-encryption-config\") pod \"apiserver-76f77b778f-tdl79\" (UID: \"503191a0-1fb3-4b1e-84f1-ac3d702f686e\") " pod="openshift-apiserver/apiserver-76f77b778f-tdl79" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.687041 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/53bbfcae-905c-4cb8-883d-e027fe0939b5-serving-cert\") pod \"console-operator-58897d9998-hlvdp\" (UID: \"53bbfcae-905c-4cb8-883d-e027fe0939b5\") " pod="openshift-console-operator/console-operator-58897d9998-hlvdp" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.687070 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/503191a0-1fb3-4b1e-84f1-ac3d702f686e-node-pullsecrets\") pod \"apiserver-76f77b778f-tdl79\" (UID: \"503191a0-1fb3-4b1e-84f1-ac3d702f686e\") " pod="openshift-apiserver/apiserver-76f77b778f-tdl79" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.687112 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/b495236a-11fa-48fb-9361-3c02fe062e4b-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-r8tph\" (UID: \"b495236a-11fa-48fb-9361-3c02fe062e4b\") " pod="openshift-authentication/oauth-openshift-558db77b4-r8tph" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.687486 
4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/3970c1fd-c1a9-40a4-a2b5-276df544f222-available-featuregates\") pod \"openshift-config-operator-7777fb866f-75wjk\" (UID: \"3970c1fd-c1a9-40a4-a2b5-276df544f222\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-75wjk" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.687535 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2cf85f90-a707-4cbf-9cea-472b1109692d-config\") pod \"controller-manager-879f6c89f-df648\" (UID: \"2cf85f90-a707-4cbf-9cea-472b1109692d\") " pod="openshift-controller-manager/controller-manager-879f6c89f-df648" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.687539 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4a7a5ec3-bbea-4979-a00d-25961a1fd8f6-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-vgtf6\" (UID: \"4a7a5ec3-bbea-4979-a00d-25961a1fd8f6\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-vgtf6" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.687920 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3970c1fd-c1a9-40a4-a2b5-276df544f222-serving-cert\") pod \"openshift-config-operator-7777fb866f-75wjk\" (UID: \"3970c1fd-c1a9-40a4-a2b5-276df544f222\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-75wjk" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.688187 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/3c83ea01-beb2-4b6c-b67b-93cea3b56ca7-oauth-serving-cert\") pod \"console-f9d7485db-md87h\" (UID: \"3c83ea01-beb2-4b6c-b67b-93cea3b56ca7\") " pod="openshift-console/console-f9d7485db-md87h" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.688263 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/503191a0-1fb3-4b1e-84f1-ac3d702f686e-node-pullsecrets\") pod \"apiserver-76f77b778f-tdl79\" (UID: \"503191a0-1fb3-4b1e-84f1-ac3d702f686e\") " pod="openshift-apiserver/apiserver-76f77b778f-tdl79" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.688445 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/53bbfcae-905c-4cb8-883d-e027fe0939b5-trusted-ca\") pod \"console-operator-58897d9998-hlvdp\" (UID: \"53bbfcae-905c-4cb8-883d-e027fe0939b5\") " pod="openshift-console-operator/console-operator-58897d9998-hlvdp" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.688538 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/b495236a-11fa-48fb-9361-3c02fe062e4b-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-r8tph\" (UID: \"b495236a-11fa-48fb-9361-3c02fe062e4b\") " pod="openshift-authentication/oauth-openshift-558db77b4-r8tph" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.689237 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e0967d6a-234e-4f7b-b4ec-073e1822fec1-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-zzmfv\" (UID: 
\"e0967d6a-234e-4f7b-b4ec-073e1822fec1\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-zzmfv" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.689681 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4a7a5ec3-bbea-4979-a00d-25961a1fd8f6-config\") pod \"authentication-operator-69f744f599-vgtf6\" (UID: \"4a7a5ec3-bbea-4979-a00d-25961a1fd8f6\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-vgtf6" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.690559 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/789ee928-afa8-424d-8810-6a04b2a7d5d6-images\") pod \"machine-api-operator-5694c8668f-r86xc\" (UID: \"789ee928-afa8-424d-8810-6a04b2a7d5d6\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-r86xc" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.691600 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/503191a0-1fb3-4b1e-84f1-ac3d702f686e-audit-dir\") pod \"apiserver-76f77b778f-tdl79\" (UID: \"503191a0-1fb3-4b1e-84f1-ac3d702f686e\") " pod="openshift-apiserver/apiserver-76f77b778f-tdl79" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.691799 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/b495236a-11fa-48fb-9361-3c02fe062e4b-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-r8tph\" (UID: \"b495236a-11fa-48fb-9361-3c02fe062e4b\") " pod="openshift-authentication/oauth-openshift-558db77b4-r8tph" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.691985 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/b495236a-11fa-48fb-9361-3c02fe062e4b-audit-dir\") pod \"oauth-openshift-558db77b4-r8tph\" (UID: \"b495236a-11fa-48fb-9361-3c02fe062e4b\") " pod="openshift-authentication/oauth-openshift-558db77b4-r8tph" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.687109 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/3970c1fd-c1a9-40a4-a2b5-276df544f222-available-featuregates\") pod \"openshift-config-operator-7777fb866f-75wjk\" (UID: \"3970c1fd-c1a9-40a4-a2b5-276df544f222\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-75wjk" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.692060 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t5b9j\" (UniqueName: \"kubernetes.io/projected/3970c1fd-c1a9-40a4-a2b5-276df544f222-kube-api-access-t5b9j\") pod \"openshift-config-operator-7777fb866f-75wjk\" (UID: \"3970c1fd-c1a9-40a4-a2b5-276df544f222\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-75wjk" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.692090 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pxnwp\" (UniqueName: \"kubernetes.io/projected/94e2e017-59cc-4809-8244-a100190f35a9-kube-api-access-pxnwp\") pod \"openshift-controller-manager-operator-756b6f6bc6-xhwpp\" (UID: \"94e2e017-59cc-4809-8244-a100190f35a9\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xhwpp" Sep 30 09:51:35 crc kubenswrapper[4730]: 
I0930 09:51:35.692135 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/e0967d6a-234e-4f7b-b4ec-073e1822fec1-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-zzmfv\" (UID: \"e0967d6a-234e-4f7b-b4ec-073e1822fec1\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-zzmfv" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.692160 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9rw9b\" (UniqueName: \"kubernetes.io/projected/e0967d6a-234e-4f7b-b4ec-073e1822fec1-kube-api-access-9rw9b\") pod \"cluster-image-registry-operator-dc59b4c8b-zzmfv\" (UID: \"e0967d6a-234e-4f7b-b4ec-073e1822fec1\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-zzmfv" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.692180 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/b495236a-11fa-48fb-9361-3c02fe062e4b-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-r8tph\" (UID: \"b495236a-11fa-48fb-9361-3c02fe062e4b\") " pod="openshift-authentication/oauth-openshift-558db77b4-r8tph" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.692205 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p7sp2\" (UniqueName: \"kubernetes.io/projected/9285876c-50d7-4dc8-a57d-d7a6db5cf3ce-kube-api-access-p7sp2\") pod \"catalog-operator-68c6474976-7sbbv\" (UID: \"9285876c-50d7-4dc8-a57d-d7a6db5cf3ce\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-7sbbv" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.692228 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4a7a5ec3-bbea-4979-a00d-25961a1fd8f6-service-ca-bundle\") pod \"authentication-operator-69f744f599-vgtf6\" (UID: \"4a7a5ec3-bbea-4979-a00d-25961a1fd8f6\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-vgtf6" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.692247 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b495236a-11fa-48fb-9361-3c02fe062e4b-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-r8tph\" (UID: \"b495236a-11fa-48fb-9361-3c02fe062e4b\") " pod="openshift-authentication/oauth-openshift-558db77b4-r8tph" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.692274 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7shjk\" (UniqueName: \"kubernetes.io/projected/503191a0-1fb3-4b1e-84f1-ac3d702f686e-kube-api-access-7shjk\") pod \"apiserver-76f77b778f-tdl79\" (UID: \"503191a0-1fb3-4b1e-84f1-ac3d702f686e\") " pod="openshift-apiserver/apiserver-76f77b778f-tdl79" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.692682 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/503191a0-1fb3-4b1e-84f1-ac3d702f686e-encryption-config\") pod \"apiserver-76f77b778f-tdl79\" (UID: \"503191a0-1fb3-4b1e-84f1-ac3d702f686e\") " pod="openshift-apiserver/apiserver-76f77b778f-tdl79" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.692835 4730 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/503191a0-1fb3-4b1e-84f1-ac3d702f686e-image-import-ca\") pod \"apiserver-76f77b778f-tdl79\" (UID: \"503191a0-1fb3-4b1e-84f1-ac3d702f686e\") " pod="openshift-apiserver/apiserver-76f77b778f-tdl79" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.693392 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/3c83ea01-beb2-4b6c-b67b-93cea3b56ca7-console-serving-cert\") pod \"console-f9d7485db-md87h\" (UID: \"3c83ea01-beb2-4b6c-b67b-93cea3b56ca7\") " pod="openshift-console/console-f9d7485db-md87h" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.694087 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/b495236a-11fa-48fb-9361-3c02fe062e4b-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-r8tph\" (UID: \"b495236a-11fa-48fb-9361-3c02fe062e4b\") " pod="openshift-authentication/oauth-openshift-558db77b4-r8tph" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.694501 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/2cf85f90-a707-4cbf-9cea-472b1109692d-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-df648\" (UID: \"2cf85f90-a707-4cbf-9cea-472b1109692d\") " pod="openshift-controller-manager/controller-manager-879f6c89f-df648" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.694503 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3c83ea01-beb2-4b6c-b67b-93cea3b56ca7-trusted-ca-bundle\") pod \"console-f9d7485db-md87h\" (UID: \"3c83ea01-beb2-4b6c-b67b-93cea3b56ca7\") " pod="openshift-console/console-f9d7485db-md87h" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.694585 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b495236a-11fa-48fb-9361-3c02fe062e4b-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-r8tph\" (UID: \"b495236a-11fa-48fb-9361-3c02fe062e4b\") " pod="openshift-authentication/oauth-openshift-558db77b4-r8tph" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.694792 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/94e2e017-59cc-4809-8244-a100190f35a9-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-xhwpp\" (UID: \"94e2e017-59cc-4809-8244-a100190f35a9\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xhwpp" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.695052 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/b495236a-11fa-48fb-9361-3c02fe062e4b-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-r8tph\" (UID: \"b495236a-11fa-48fb-9361-3c02fe062e4b\") " pod="openshift-authentication/oauth-openshift-558db77b4-r8tph" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.695129 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/94e2e017-59cc-4809-8244-a100190f35a9-serving-cert\") pod 
\"openshift-controller-manager-operator-756b6f6bc6-xhwpp\" (UID: \"94e2e017-59cc-4809-8244-a100190f35a9\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xhwpp" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.695854 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/b495236a-11fa-48fb-9361-3c02fe062e4b-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-r8tph\" (UID: \"b495236a-11fa-48fb-9361-3c02fe062e4b\") " pod="openshift-authentication/oauth-openshift-558db77b4-r8tph" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.695903 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/53bbfcae-905c-4cb8-883d-e027fe0939b5-config\") pod \"console-operator-58897d9998-hlvdp\" (UID: \"53bbfcae-905c-4cb8-883d-e027fe0939b5\") " pod="openshift-console-operator/console-operator-58897d9998-hlvdp" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.696220 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/3c83ea01-beb2-4b6c-b67b-93cea3b56ca7-console-oauth-config\") pod \"console-f9d7485db-md87h\" (UID: \"3c83ea01-beb2-4b6c-b67b-93cea3b56ca7\") " pod="openshift-console/console-f9d7485db-md87h" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.696292 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/b495236a-11fa-48fb-9361-3c02fe062e4b-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-r8tph\" (UID: \"b495236a-11fa-48fb-9361-3c02fe062e4b\") " pod="openshift-authentication/oauth-openshift-558db77b4-r8tph" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.696457 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/503191a0-1fb3-4b1e-84f1-ac3d702f686e-etcd-client\") pod \"apiserver-76f77b778f-tdl79\" (UID: \"503191a0-1fb3-4b1e-84f1-ac3d702f686e\") " pod="openshift-apiserver/apiserver-76f77b778f-tdl79" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.696529 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/789ee928-afa8-424d-8810-6a04b2a7d5d6-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-r86xc\" (UID: \"789ee928-afa8-424d-8810-6a04b2a7d5d6\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-r86xc" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.696740 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/503191a0-1fb3-4b1e-84f1-ac3d702f686e-audit\") pod \"apiserver-76f77b778f-tdl79\" (UID: \"503191a0-1fb3-4b1e-84f1-ac3d702f686e\") " pod="openshift-apiserver/apiserver-76f77b778f-tdl79" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.696775 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2cf85f90-a707-4cbf-9cea-472b1109692d-serving-cert\") pod \"controller-manager-879f6c89f-df648\" (UID: \"2cf85f90-a707-4cbf-9cea-472b1109692d\") " pod="openshift-controller-manager/controller-manager-879f6c89f-df648" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.697113 4730 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/b495236a-11fa-48fb-9361-3c02fe062e4b-audit-policies\") pod \"oauth-openshift-558db77b4-r8tph\" (UID: \"b495236a-11fa-48fb-9361-3c02fe062e4b\") " pod="openshift-authentication/oauth-openshift-558db77b4-r8tph" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.697369 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4a7a5ec3-bbea-4979-a00d-25961a1fd8f6-service-ca-bundle\") pod \"authentication-operator-69f744f599-vgtf6\" (UID: \"4a7a5ec3-bbea-4979-a00d-25961a1fd8f6\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-vgtf6" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.697486 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/e0967d6a-234e-4f7b-b4ec-073e1822fec1-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-zzmfv\" (UID: \"e0967d6a-234e-4f7b-b4ec-073e1822fec1\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-zzmfv" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.698173 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/503191a0-1fb3-4b1e-84f1-ac3d702f686e-serving-cert\") pod \"apiserver-76f77b778f-tdl79\" (UID: \"503191a0-1fb3-4b1e-84f1-ac3d702f686e\") " pod="openshift-apiserver/apiserver-76f77b778f-tdl79" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.698240 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/b495236a-11fa-48fb-9361-3c02fe062e4b-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-r8tph\" (UID: \"b495236a-11fa-48fb-9361-3c02fe062e4b\") " pod="openshift-authentication/oauth-openshift-558db77b4-r8tph" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.698692 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4a7a5ec3-bbea-4979-a00d-25961a1fd8f6-serving-cert\") pod \"authentication-operator-69f744f599-vgtf6\" (UID: \"4a7a5ec3-bbea-4979-a00d-25961a1fd8f6\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-vgtf6" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.699015 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/1445a993-db8f-4cdb-a89c-9f45a3ee0b4d-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-brcnh\" (UID: \"1445a993-db8f-4cdb-a89c-9f45a3ee0b4d\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-brcnh" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.699472 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/53bbfcae-905c-4cb8-883d-e027fe0939b5-serving-cert\") pod \"console-operator-58897d9998-hlvdp\" (UID: \"53bbfcae-905c-4cb8-883d-e027fe0939b5\") " pod="openshift-console-operator/console-operator-58897d9998-hlvdp" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.699983 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: 
\"kubernetes.io/secret/b495236a-11fa-48fb-9361-3c02fe062e4b-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-r8tph\" (UID: \"b495236a-11fa-48fb-9361-3c02fe062e4b\") " pod="openshift-authentication/oauth-openshift-558db77b4-r8tph" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.705773 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.710426 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/b495236a-11fa-48fb-9361-3c02fe062e4b-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-r8tph\" (UID: \"b495236a-11fa-48fb-9361-3c02fe062e4b\") " pod="openshift-authentication/oauth-openshift-558db77b4-r8tph" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.725233 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.745011 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.764989 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.785000 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.805266 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.825119 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.846132 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.864758 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.885837 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.905187 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.925887 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.944457 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.965709 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Sep 30 09:51:35 crc kubenswrapper[4730]: I0930 09:51:35.985248 4730 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-etcd-operator"/"openshift-service-ca.crt" Sep 30 09:51:36 crc kubenswrapper[4730]: I0930 09:51:36.004802 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Sep 30 09:51:36 crc kubenswrapper[4730]: I0930 09:51:36.025570 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Sep 30 09:51:36 crc kubenswrapper[4730]: I0930 09:51:36.044965 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Sep 30 09:51:36 crc kubenswrapper[4730]: I0930 09:51:36.064550 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Sep 30 09:51:36 crc kubenswrapper[4730]: I0930 09:51:36.084718 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Sep 30 09:51:36 crc kubenswrapper[4730]: I0930 09:51:36.094183 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/9285876c-50d7-4dc8-a57d-d7a6db5cf3ce-profile-collector-cert\") pod \"catalog-operator-68c6474976-7sbbv\" (UID: \"9285876c-50d7-4dc8-a57d-d7a6db5cf3ce\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-7sbbv" Sep 30 09:51:36 crc kubenswrapper[4730]: I0930 09:51:36.104911 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Sep 30 09:51:36 crc kubenswrapper[4730]: I0930 09:51:36.125201 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Sep 30 09:51:36 crc kubenswrapper[4730]: I0930 09:51:36.144286 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Sep 30 09:51:36 crc kubenswrapper[4730]: I0930 09:51:36.149135 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/9285876c-50d7-4dc8-a57d-d7a6db5cf3ce-srv-cert\") pod \"catalog-operator-68c6474976-7sbbv\" (UID: \"9285876c-50d7-4dc8-a57d-d7a6db5cf3ce\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-7sbbv" Sep 30 09:51:36 crc kubenswrapper[4730]: I0930 09:51:36.164550 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Sep 30 09:51:36 crc kubenswrapper[4730]: I0930 09:51:36.185544 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Sep 30 09:51:36 crc kubenswrapper[4730]: I0930 09:51:36.204463 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Sep 30 09:51:36 crc kubenswrapper[4730]: I0930 09:51:36.224952 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Sep 30 09:51:36 crc kubenswrapper[4730]: I0930 09:51:36.244759 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Sep 30 09:51:36 crc kubenswrapper[4730]: I0930 09:51:36.285244 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Sep 30 09:51:36 crc 
kubenswrapper[4730]: I0930 09:51:36.304888 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Sep 30 09:51:36 crc kubenswrapper[4730]: I0930 09:51:36.326191 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Sep 30 09:51:36 crc kubenswrapper[4730]: I0930 09:51:36.344185 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Sep 30 09:51:36 crc kubenswrapper[4730]: I0930 09:51:36.365177 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Sep 30 09:51:36 crc kubenswrapper[4730]: I0930 09:51:36.384683 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Sep 30 09:51:36 crc kubenswrapper[4730]: I0930 09:51:36.404989 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Sep 30 09:51:36 crc kubenswrapper[4730]: I0930 09:51:36.425879 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Sep 30 09:51:36 crc kubenswrapper[4730]: I0930 09:51:36.444117 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Sep 30 09:51:36 crc kubenswrapper[4730]: I0930 09:51:36.465771 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Sep 30 09:51:36 crc kubenswrapper[4730]: I0930 09:51:36.484388 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Sep 30 09:51:36 crc kubenswrapper[4730]: I0930 09:51:36.505304 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Sep 30 09:51:36 crc kubenswrapper[4730]: I0930 09:51:36.514704 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/584babfe-a3c2-49da-97ac-c3a9fc90f102-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-bkbql\" (UID: \"584babfe-a3c2-49da-97ac-c3a9fc90f102\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-bkbql" Sep 30 09:51:36 crc kubenswrapper[4730]: I0930 09:51:36.522960 4730 request.go:700] Waited for 1.010657141s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-apiserver-operator/configmaps?fieldSelector=metadata.name%3Dopenshift-apiserver-operator-config&limit=500&resourceVersion=0 Sep 30 09:51:36 crc kubenswrapper[4730]: I0930 09:51:36.525215 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Sep 30 09:51:36 crc kubenswrapper[4730]: I0930 09:51:36.529045 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/584babfe-a3c2-49da-97ac-c3a9fc90f102-config\") pod 
\"openshift-apiserver-operator-796bbdcf4f-bkbql\" (UID: \"584babfe-a3c2-49da-97ac-c3a9fc90f102\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-bkbql" Sep 30 09:51:36 crc kubenswrapper[4730]: I0930 09:51:36.545402 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Sep 30 09:51:36 crc kubenswrapper[4730]: I0930 09:51:36.565112 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Sep 30 09:51:36 crc kubenswrapper[4730]: I0930 09:51:36.585778 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Sep 30 09:51:36 crc kubenswrapper[4730]: I0930 09:51:36.605757 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Sep 30 09:51:36 crc kubenswrapper[4730]: I0930 09:51:36.626179 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Sep 30 09:51:36 crc kubenswrapper[4730]: I0930 09:51:36.644872 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Sep 30 09:51:36 crc kubenswrapper[4730]: I0930 09:51:36.665384 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Sep 30 09:51:36 crc kubenswrapper[4730]: I0930 09:51:36.685664 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Sep 30 09:51:36 crc kubenswrapper[4730]: I0930 09:51:36.706341 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Sep 30 09:51:36 crc kubenswrapper[4730]: I0930 09:51:36.732450 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Sep 30 09:51:36 crc kubenswrapper[4730]: I0930 09:51:36.746517 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Sep 30 09:51:36 crc kubenswrapper[4730]: I0930 09:51:36.784910 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Sep 30 09:51:36 crc kubenswrapper[4730]: I0930 09:51:36.804981 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Sep 30 09:51:36 crc kubenswrapper[4730]: I0930 09:51:36.824831 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Sep 30 09:51:36 crc kubenswrapper[4730]: I0930 09:51:36.845410 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Sep 30 09:51:36 crc kubenswrapper[4730]: I0930 09:51:36.865053 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Sep 30 09:51:36 crc kubenswrapper[4730]: I0930 09:51:36.884632 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Sep 30 09:51:36 crc kubenswrapper[4730]: I0930 09:51:36.904911 4730 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-service-ca-operator"/"serving-cert" Sep 30 09:51:36 crc kubenswrapper[4730]: I0930 09:51:36.925023 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Sep 30 09:51:36 crc kubenswrapper[4730]: I0930 09:51:36.945168 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Sep 30 09:51:36 crc kubenswrapper[4730]: I0930 09:51:36.966362 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Sep 30 09:51:36 crc kubenswrapper[4730]: I0930 09:51:36.988227 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Sep 30 09:51:37 crc kubenswrapper[4730]: I0930 09:51:37.006073 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Sep 30 09:51:37 crc kubenswrapper[4730]: I0930 09:51:37.026204 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Sep 30 09:51:37 crc kubenswrapper[4730]: I0930 09:51:37.044371 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Sep 30 09:51:37 crc kubenswrapper[4730]: I0930 09:51:37.078454 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vwqfj\" (UniqueName: \"kubernetes.io/projected/77e50cc1-ad3e-4d41-ac6a-0bf534f94b8c-kube-api-access-vwqfj\") pod \"machine-approver-56656f9798-mpfz6\" (UID: \"77e50cc1-ad3e-4d41-ac6a-0bf534f94b8c\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-mpfz6" Sep 30 09:51:37 crc kubenswrapper[4730]: I0930 09:51:37.102330 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rbb94\" (UniqueName: \"kubernetes.io/projected/f17dee56-592e-45e7-8c4f-80854757d254-kube-api-access-rbb94\") pod \"route-controller-manager-6576b87f9c-cljtg\" (UID: \"f17dee56-592e-45e7-8c4f-80854757d254\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-cljtg" Sep 30 09:51:37 crc kubenswrapper[4730]: I0930 09:51:37.118957 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gfdtj\" (UniqueName: \"kubernetes.io/projected/fc0f4b15-4b92-4799-b77f-8735c85b2203-kube-api-access-gfdtj\") pod \"ingress-operator-5b745b69d9-n9qkg\" (UID: \"fc0f4b15-4b92-4799-b77f-8735c85b2203\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-n9qkg" Sep 30 09:51:37 crc kubenswrapper[4730]: I0930 09:51:37.144531 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/fc0f4b15-4b92-4799-b77f-8735c85b2203-bound-sa-token\") pod \"ingress-operator-5b745b69d9-n9qkg\" (UID: \"fc0f4b15-4b92-4799-b77f-8735c85b2203\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-n9qkg" Sep 30 09:51:37 crc kubenswrapper[4730]: I0930 09:51:37.152372 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-mpfz6" Sep 30 09:51:37 crc kubenswrapper[4730]: I0930 09:51:37.163043 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-n9qkg" Sep 30 09:51:37 crc kubenswrapper[4730]: I0930 09:51:37.163946 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wc5g6\" (UniqueName: \"kubernetes.io/projected/dff327ac-2d87-47ce-ae92-f6e001b9d876-kube-api-access-wc5g6\") pod \"apiserver-7bbb656c7d-vxqtl\" (UID: \"dff327ac-2d87-47ce-ae92-f6e001b9d876\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vxqtl" Sep 30 09:51:37 crc kubenswrapper[4730]: I0930 09:51:37.164744 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Sep 30 09:51:37 crc kubenswrapper[4730]: I0930 09:51:37.185120 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Sep 30 09:51:37 crc kubenswrapper[4730]: I0930 09:51:37.205428 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Sep 30 09:51:37 crc kubenswrapper[4730]: I0930 09:51:37.224708 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Sep 30 09:51:37 crc kubenswrapper[4730]: I0930 09:51:37.244806 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vxqtl" Sep 30 09:51:37 crc kubenswrapper[4730]: I0930 09:51:37.244902 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Sep 30 09:51:37 crc kubenswrapper[4730]: I0930 09:51:37.266096 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Sep 30 09:51:37 crc kubenswrapper[4730]: I0930 09:51:37.284957 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Sep 30 09:51:37 crc kubenswrapper[4730]: I0930 09:51:37.305389 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Sep 30 09:51:37 crc kubenswrapper[4730]: I0930 09:51:37.326364 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Sep 30 09:51:37 crc kubenswrapper[4730]: I0930 09:51:37.345512 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Sep 30 09:51:37 crc kubenswrapper[4730]: I0930 09:51:37.364669 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Sep 30 09:51:37 crc kubenswrapper[4730]: I0930 09:51:37.377197 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-n9qkg"] Sep 30 09:51:37 crc kubenswrapper[4730]: I0930 09:51:37.380169 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-cljtg" Sep 30 09:51:37 crc kubenswrapper[4730]: I0930 09:51:37.385994 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Sep 30 09:51:37 crc kubenswrapper[4730]: W0930 09:51:37.389190 4730 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfc0f4b15_4b92_4799_b77f_8735c85b2203.slice/crio-15d98372ae4be913b34ebc2cbbbbedea5bd472b769a1374da74e2481f9022186 WatchSource:0}: Error finding container 15d98372ae4be913b34ebc2cbbbbedea5bd472b769a1374da74e2481f9022186: Status 404 returned error can't find the container with id 15d98372ae4be913b34ebc2cbbbbedea5bd472b769a1374da74e2481f9022186 Sep 30 09:51:37 crc kubenswrapper[4730]: I0930 09:51:37.405086 4730 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Sep 30 09:51:37 crc kubenswrapper[4730]: I0930 09:51:37.424629 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Sep 30 09:51:37 crc kubenswrapper[4730]: I0930 09:51:37.446489 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Sep 30 09:51:37 crc kubenswrapper[4730]: I0930 09:51:37.461687 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-vxqtl"] Sep 30 09:51:37 crc kubenswrapper[4730]: I0930 09:51:37.465952 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Sep 30 09:51:37 crc kubenswrapper[4730]: I0930 09:51:37.488431 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Sep 30 09:51:37 crc kubenswrapper[4730]: I0930 09:51:37.505170 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Sep 30 09:51:37 crc kubenswrapper[4730]: I0930 09:51:37.523360 4730 request.go:700] Waited for 1.838528542s due to client-side throttling, not priority and fairness, request: POST:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-machine-api/serviceaccounts/machine-api-operator/token Sep 30 09:51:37 crc kubenswrapper[4730]: I0930 09:51:37.547323 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ctkrp\" (UniqueName: \"kubernetes.io/projected/789ee928-afa8-424d-8810-6a04b2a7d5d6-kube-api-access-ctkrp\") pod \"machine-api-operator-5694c8668f-r86xc\" (UID: \"789ee928-afa8-424d-8810-6a04b2a7d5d6\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-r86xc" Sep 30 09:51:37 crc kubenswrapper[4730]: I0930 09:51:37.561578 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/e0967d6a-234e-4f7b-b4ec-073e1822fec1-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-zzmfv\" (UID: \"e0967d6a-234e-4f7b-b4ec-073e1822fec1\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-zzmfv" Sep 30 09:51:37 crc kubenswrapper[4730]: I0930 09:51:37.579648 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-cljtg"] Sep 30 09:51:37 crc kubenswrapper[4730]: I0930 
09:51:37.584546 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9nr77\" (UniqueName: \"kubernetes.io/projected/53bbfcae-905c-4cb8-883d-e027fe0939b5-kube-api-access-9nr77\") pod \"console-operator-58897d9998-hlvdp\" (UID: \"53bbfcae-905c-4cb8-883d-e027fe0939b5\") " pod="openshift-console-operator/console-operator-58897d9998-hlvdp"
Sep 30 09:51:37 crc kubenswrapper[4730]: W0930 09:51:37.593991 4730 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf17dee56_592e_45e7_8c4f_80854757d254.slice/crio-e8e499b6f69810484c9872afcb0112e8fb5b2f5bf10c0a579fb9ed0c69e8a39e WatchSource:0}: Error finding container e8e499b6f69810484c9872afcb0112e8fb5b2f5bf10c0a579fb9ed0c69e8a39e: Status 404 returned error can't find the container with id e8e499b6f69810484c9872afcb0112e8fb5b2f5bf10c0a579fb9ed0c69e8a39e
Sep 30 09:51:37 crc kubenswrapper[4730]: I0930 09:51:37.610461 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h5vf7\" (UniqueName: \"kubernetes.io/projected/2cf85f90-a707-4cbf-9cea-472b1109692d-kube-api-access-h5vf7\") pod \"controller-manager-879f6c89f-df648\" (UID: \"2cf85f90-a707-4cbf-9cea-472b1109692d\") " pod="openshift-controller-manager/controller-manager-879f6c89f-df648"
Sep 30 09:51:37 crc kubenswrapper[4730]: I0930 09:51:37.624221 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qcz94\" (UniqueName: \"kubernetes.io/projected/584babfe-a3c2-49da-97ac-c3a9fc90f102-kube-api-access-qcz94\") pod \"openshift-apiserver-operator-796bbdcf4f-bkbql\" (UID: \"584babfe-a3c2-49da-97ac-c3a9fc90f102\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-bkbql"
Sep 30 09:51:37 crc kubenswrapper[4730]: I0930 09:51:37.628317 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-df648"
Sep 30 09:51:37 crc kubenswrapper[4730]: I0930 09:51:37.647567 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b2922\" (UniqueName: \"kubernetes.io/projected/b495236a-11fa-48fb-9361-3c02fe062e4b-kube-api-access-b2922\") pod \"oauth-openshift-558db77b4-r8tph\" (UID: \"b495236a-11fa-48fb-9361-3c02fe062e4b\") " pod="openshift-authentication/oauth-openshift-558db77b4-r8tph"
Sep 30 09:51:37 crc kubenswrapper[4730]: I0930 09:51:37.661920 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r5lnm\" (UniqueName: \"kubernetes.io/projected/3c83ea01-beb2-4b6c-b67b-93cea3b56ca7-kube-api-access-r5lnm\") pod \"console-f9d7485db-md87h\" (UID: \"3c83ea01-beb2-4b6c-b67b-93cea3b56ca7\") " pod="openshift-console/console-f9d7485db-md87h"
Sep 30 09:51:37 crc kubenswrapper[4730]: I0930 09:51:37.681006 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ltrhn\" (UniqueName: \"kubernetes.io/projected/4a7a5ec3-bbea-4979-a00d-25961a1fd8f6-kube-api-access-ltrhn\") pod \"authentication-operator-69f744f599-vgtf6\" (UID: \"4a7a5ec3-bbea-4979-a00d-25961a1fd8f6\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-vgtf6"
Sep 30 09:51:37 crc kubenswrapper[4730]: I0930 09:51:37.681287 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-bkbql"
Sep 30 09:51:37 crc kubenswrapper[4730]: I0930 09:51:37.703404 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rjzd6\" (UniqueName: \"kubernetes.io/projected/1445a993-db8f-4cdb-a89c-9f45a3ee0b4d-kube-api-access-rjzd6\") pod \"cluster-samples-operator-665b6dd947-brcnh\" (UID: \"1445a993-db8f-4cdb-a89c-9f45a3ee0b4d\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-brcnh"
Sep 30 09:51:37 crc kubenswrapper[4730]: I0930 09:51:37.719870 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-r86xc"
Sep 30 09:51:37 crc kubenswrapper[4730]: I0930 09:51:37.721306 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pxnwp\" (UniqueName: \"kubernetes.io/projected/94e2e017-59cc-4809-8244-a100190f35a9-kube-api-access-pxnwp\") pod \"openshift-controller-manager-operator-756b6f6bc6-xhwpp\" (UID: \"94e2e017-59cc-4809-8244-a100190f35a9\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xhwpp"
Sep 30 09:51:37 crc kubenswrapper[4730]: I0930 09:51:37.746698 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t5b9j\" (UniqueName: \"kubernetes.io/projected/3970c1fd-c1a9-40a4-a2b5-276df544f222-kube-api-access-t5b9j\") pod \"openshift-config-operator-7777fb866f-75wjk\" (UID: \"3970c1fd-c1a9-40a4-a2b5-276df544f222\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-75wjk"
Sep 30 09:51:37 crc kubenswrapper[4730]: I0930 09:51:37.774163 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9rw9b\" (UniqueName: \"kubernetes.io/projected/e0967d6a-234e-4f7b-b4ec-073e1822fec1-kube-api-access-9rw9b\") pod \"cluster-image-registry-operator-dc59b4c8b-zzmfv\" (UID: \"e0967d6a-234e-4f7b-b4ec-073e1822fec1\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-zzmfv"
Sep 30 09:51:37 crc kubenswrapper[4730]: I0930 09:51:37.784987 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-r8tph"
Sep 30 09:51:37 crc kubenswrapper[4730]: I0930 09:51:37.789945 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9vdjq\" (UniqueName: \"kubernetes.io/projected/af4abbd6-d1b9-411d-9128-cc5b74a93eb5-kube-api-access-9vdjq\") pod \"downloads-7954f5f757-gf4lh\" (UID: \"af4abbd6-d1b9-411d-9128-cc5b74a93eb5\") " pod="openshift-console/downloads-7954f5f757-gf4lh"
Sep 30 09:51:37 crc kubenswrapper[4730]: I0930 09:51:37.801863 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7shjk\" (UniqueName: \"kubernetes.io/projected/503191a0-1fb3-4b1e-84f1-ac3d702f686e-kube-api-access-7shjk\") pod \"apiserver-76f77b778f-tdl79\" (UID: \"503191a0-1fb3-4b1e-84f1-ac3d702f686e\") " pod="openshift-apiserver/apiserver-76f77b778f-tdl79"
Sep 30 09:51:37 crc kubenswrapper[4730]: I0930 09:51:37.822003 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-tdl79"
Sep 30 09:51:37 crc kubenswrapper[4730]: I0930 09:51:37.822688 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p7sp2\" (UniqueName: \"kubernetes.io/projected/9285876c-50d7-4dc8-a57d-d7a6db5cf3ce-kube-api-access-p7sp2\") pod \"catalog-operator-68c6474976-7sbbv\" (UID: \"9285876c-50d7-4dc8-a57d-d7a6db5cf3ce\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-7sbbv"
Sep 30 09:51:37 crc kubenswrapper[4730]: I0930 09:51:37.849778 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-hlvdp"
Sep 30 09:51:37 crc kubenswrapper[4730]: I0930 09:51:37.865446 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-vgtf6"
Sep 30 09:51:37 crc kubenswrapper[4730]: I0930 09:51:37.879948 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-df648"]
Sep 30 09:51:37 crc kubenswrapper[4730]: I0930 09:51:37.886621 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xhwpp"
Sep 30 09:51:37 crc kubenswrapper[4730]: I0930 09:51:37.907076 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-md87h"
Sep 30 09:51:37 crc kubenswrapper[4730]: I0930 09:51:37.936201 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/800a0b26-7e9b-458b-95a9-703c7ace905f-metrics-tls\") pod \"dns-operator-744455d44c-cjsxd\" (UID: \"800a0b26-7e9b-458b-95a9-703c7ace905f\") " pod="openshift-dns-operator/dns-operator-744455d44c-cjsxd"
Sep 30 09:51:37 crc kubenswrapper[4730]: I0930 09:51:37.936271 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/555ceaf9-a5fa-41b9-ba91-92bd420e0c1b-serving-cert\") pod \"etcd-operator-b45778765-cxj9q\" (UID: \"555ceaf9-a5fa-41b9-ba91-92bd420e0c1b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-cxj9q"
Sep 30 09:51:37 crc kubenswrapper[4730]: I0930 09:51:37.936290 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b437c48c-f825-4a51-9076-acf5dcd25e36-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-nt7nq\" (UID: \"b437c48c-f825-4a51-9076-acf5dcd25e36\") " pod="openshift-marketplace/marketplace-operator-79b997595-nt7nq"
Sep 30 09:51:37 crc kubenswrapper[4730]: I0930 09:51:37.936318 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b601ae3e-520b-43c1-8c4d-3946b0fb7cff-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-sfwd4\" (UID: \"b601ae3e-520b-43c1-8c4d-3946b0fb7cff\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-sfwd4"
\"kubernetes.io/configmap/16c0279b-a0e3-4400-be1a-c485c8ea0a34-service-ca-bundle\") pod \"router-default-5444994796-m2sph\" (UID: \"16c0279b-a0e3-4400-be1a-c485c8ea0a34\") " pod="openshift-ingress/router-default-5444994796-m2sph" Sep 30 09:51:37 crc kubenswrapper[4730]: I0930 09:51:37.936350 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/21ce9670-1b8d-4896-a33b-b6dc5125dbdc-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-9x2qr\" (UID: \"21ce9670-1b8d-4896-a33b-b6dc5125dbdc\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-9x2qr" Sep 30 09:51:37 crc kubenswrapper[4730]: I0930 09:51:37.936376 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/21ce9670-1b8d-4896-a33b-b6dc5125dbdc-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-9x2qr\" (UID: \"21ce9670-1b8d-4896-a33b-b6dc5125dbdc\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-9x2qr" Sep 30 09:51:37 crc kubenswrapper[4730]: I0930 09:51:37.936439 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f28s2\" (UniqueName: \"kubernetes.io/projected/9297e80b-31ef-46af-98dc-8770fb66a889-kube-api-access-f28s2\") pod \"migrator-59844c95c7-v86bf\" (UID: \"9297e80b-31ef-46af-98dc-8770fb66a889\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-v86bf" Sep 30 09:51:37 crc kubenswrapper[4730]: I0930 09:51:37.936470 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f427l\" (UniqueName: \"kubernetes.io/projected/778ecda6-96a8-46f1-89e5-79c372bfc123-kube-api-access-f427l\") pod \"kube-storage-version-migrator-operator-b67b599dd-jkmzt\" (UID: \"778ecda6-96a8-46f1-89e5-79c372bfc123\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-jkmzt" Sep 30 09:51:37 crc kubenswrapper[4730]: I0930 09:51:37.936514 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/555ceaf9-a5fa-41b9-ba91-92bd420e0c1b-etcd-service-ca\") pod \"etcd-operator-b45778765-cxj9q\" (UID: \"555ceaf9-a5fa-41b9-ba91-92bd420e0c1b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-cxj9q" Sep 30 09:51:37 crc kubenswrapper[4730]: I0930 09:51:37.936551 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cdca31a9-3f6d-402a-9a77-2e5156016199-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-v6vtc\" (UID: \"cdca31a9-3f6d-402a-9a77-2e5156016199\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-v6vtc" Sep 30 09:51:37 crc kubenswrapper[4730]: I0930 09:51:37.936577 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/555ceaf9-a5fa-41b9-ba91-92bd420e0c1b-config\") pod \"etcd-operator-b45778765-cxj9q\" (UID: \"555ceaf9-a5fa-41b9-ba91-92bd420e0c1b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-cxj9q" Sep 30 09:51:37 crc kubenswrapper[4730]: I0930 09:51:37.936593 4730 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rck22\" (UniqueName: \"kubernetes.io/projected/435a6f87-b787-4c31-b41f-8013e1aaae11-kube-api-access-rck22\") pod \"package-server-manager-789f6589d5-wqstr\" (UID: \"435a6f87-b787-4c31-b41f-8013e1aaae11\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-wqstr" Sep 30 09:51:37 crc kubenswrapper[4730]: I0930 09:51:37.936674 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/3f90d05f-7820-4af0-8894-6d63dc672f33-registry-tls\") pod \"image-registry-697d97f7c8-xdzzr\" (UID: \"3f90d05f-7820-4af0-8894-6d63dc672f33\") " pod="openshift-image-registry/image-registry-697d97f7c8-xdzzr" Sep 30 09:51:37 crc kubenswrapper[4730]: I0930 09:51:37.936702 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/435a6f87-b787-4c31-b41f-8013e1aaae11-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-wqstr\" (UID: \"435a6f87-b787-4c31-b41f-8013e1aaae11\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-wqstr" Sep 30 09:51:37 crc kubenswrapper[4730]: I0930 09:51:37.936728 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/a3cec9a5-601c-4ec9-a147-88a53c19de69-proxy-tls\") pod \"machine-config-controller-84d6567774-9ckzv\" (UID: \"a3cec9a5-601c-4ec9-a147-88a53c19de69\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9ckzv" Sep 30 09:51:37 crc kubenswrapper[4730]: I0930 09:51:37.936768 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/555ceaf9-a5fa-41b9-ba91-92bd420e0c1b-etcd-ca\") pod \"etcd-operator-b45778765-cxj9q\" (UID: \"555ceaf9-a5fa-41b9-ba91-92bd420e0c1b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-cxj9q" Sep 30 09:51:37 crc kubenswrapper[4730]: I0930 09:51:37.936802 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/3f90d05f-7820-4af0-8894-6d63dc672f33-registry-certificates\") pod \"image-registry-697d97f7c8-xdzzr\" (UID: \"3f90d05f-7820-4af0-8894-6d63dc672f33\") " pod="openshift-image-registry/image-registry-697d97f7c8-xdzzr" Sep 30 09:51:37 crc kubenswrapper[4730]: I0930 09:51:37.936826 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c9jlw\" (UniqueName: \"kubernetes.io/projected/800a0b26-7e9b-458b-95a9-703c7ace905f-kube-api-access-c9jlw\") pod \"dns-operator-744455d44c-cjsxd\" (UID: \"800a0b26-7e9b-458b-95a9-703c7ace905f\") " pod="openshift-dns-operator/dns-operator-744455d44c-cjsxd" Sep 30 09:51:37 crc kubenswrapper[4730]: I0930 09:51:37.936863 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cdca31a9-3f6d-402a-9a77-2e5156016199-config\") pod \"kube-controller-manager-operator-78b949d7b-v6vtc\" (UID: \"cdca31a9-3f6d-402a-9a77-2e5156016199\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-v6vtc" Sep 30 09:51:37 crc kubenswrapper[4730]: I0930 
09:51:37.936880 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/3f90d05f-7820-4af0-8894-6d63dc672f33-ca-trust-extracted\") pod \"image-registry-697d97f7c8-xdzzr\" (UID: \"3f90d05f-7820-4af0-8894-6d63dc672f33\") " pod="openshift-image-registry/image-registry-697d97f7c8-xdzzr" Sep 30 09:51:37 crc kubenswrapper[4730]: I0930 09:51:37.936895 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cx7zs\" (UniqueName: \"kubernetes.io/projected/b437c48c-f825-4a51-9076-acf5dcd25e36-kube-api-access-cx7zs\") pod \"marketplace-operator-79b997595-nt7nq\" (UID: \"b437c48c-f825-4a51-9076-acf5dcd25e36\") " pod="openshift-marketplace/marketplace-operator-79b997595-nt7nq" Sep 30 09:51:37 crc kubenswrapper[4730]: I0930 09:51:37.936965 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/16c0279b-a0e3-4400-be1a-c485c8ea0a34-metrics-certs\") pod \"router-default-5444994796-m2sph\" (UID: \"16c0279b-a0e3-4400-be1a-c485c8ea0a34\") " pod="openshift-ingress/router-default-5444994796-m2sph" Sep 30 09:51:37 crc kubenswrapper[4730]: I0930 09:51:37.936982 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rzwzh\" (UniqueName: \"kubernetes.io/projected/3f90d05f-7820-4af0-8894-6d63dc672f33-kube-api-access-rzwzh\") pod \"image-registry-697d97f7c8-xdzzr\" (UID: \"3f90d05f-7820-4af0-8894-6d63dc672f33\") " pod="openshift-image-registry/image-registry-697d97f7c8-xdzzr" Sep 30 09:51:37 crc kubenswrapper[4730]: I0930 09:51:37.937004 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/778ecda6-96a8-46f1-89e5-79c372bfc123-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-jkmzt\" (UID: \"778ecda6-96a8-46f1-89e5-79c372bfc123\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-jkmzt" Sep 30 09:51:37 crc kubenswrapper[4730]: I0930 09:51:37.937020 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/cdca31a9-3f6d-402a-9a77-2e5156016199-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-v6vtc\" (UID: \"cdca31a9-3f6d-402a-9a77-2e5156016199\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-v6vtc" Sep 30 09:51:37 crc kubenswrapper[4730]: I0930 09:51:37.937047 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/555ceaf9-a5fa-41b9-ba91-92bd420e0c1b-etcd-client\") pod \"etcd-operator-b45778765-cxj9q\" (UID: \"555ceaf9-a5fa-41b9-ba91-92bd420e0c1b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-cxj9q" Sep 30 09:51:37 crc kubenswrapper[4730]: I0930 09:51:37.937077 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g9qrg\" (UniqueName: \"kubernetes.io/projected/16c0279b-a0e3-4400-be1a-c485c8ea0a34-kube-api-access-g9qrg\") pod \"router-default-5444994796-m2sph\" (UID: \"16c0279b-a0e3-4400-be1a-c485c8ea0a34\") " pod="openshift-ingress/router-default-5444994796-m2sph" Sep 30 09:51:37 crc 
Sep 30 09:51:37 crc kubenswrapper[4730]: I0930 09:51:37.937093 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/a3cec9a5-601c-4ec9-a147-88a53c19de69-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-9ckzv\" (UID: \"a3cec9a5-601c-4ec9-a147-88a53c19de69\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9ckzv"
Sep 30 09:51:37 crc kubenswrapper[4730]: I0930 09:51:37.937127 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/778ecda6-96a8-46f1-89e5-79c372bfc123-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-jkmzt\" (UID: \"778ecda6-96a8-46f1-89e5-79c372bfc123\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-jkmzt"
Sep 30 09:51:37 crc kubenswrapper[4730]: I0930 09:51:37.937142 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b601ae3e-520b-43c1-8c4d-3946b0fb7cff-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-sfwd4\" (UID: \"b601ae3e-520b-43c1-8c4d-3946b0fb7cff\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-sfwd4"
Sep 30 09:51:37 crc kubenswrapper[4730]: I0930 09:51:37.937162 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/27d3e752-f501-41c6-aed5-aa0e58103802-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-xbf9b\" (UID: \"27d3e752-f501-41c6-aed5-aa0e58103802\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-xbf9b"
Sep 30 09:51:37 crc kubenswrapper[4730]: I0930 09:51:37.937224 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/16c0279b-a0e3-4400-be1a-c485c8ea0a34-stats-auth\") pod \"router-default-5444994796-m2sph\" (UID: \"16c0279b-a0e3-4400-be1a-c485c8ea0a34\") " pod="openshift-ingress/router-default-5444994796-m2sph"
Sep 30 09:51:37 crc kubenswrapper[4730]: I0930 09:51:37.937269 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/16c0279b-a0e3-4400-be1a-c485c8ea0a34-default-certificate\") pod \"router-default-5444994796-m2sph\" (UID: \"16c0279b-a0e3-4400-be1a-c485c8ea0a34\") " pod="openshift-ingress/router-default-5444994796-m2sph"
Sep 30 09:51:37 crc kubenswrapper[4730]: I0930 09:51:37.937319 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b601ae3e-520b-43c1-8c4d-3946b0fb7cff-config\") pod \"kube-apiserver-operator-766d6c64bb-sfwd4\" (UID: \"b601ae3e-520b-43c1-8c4d-3946b0fb7cff\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-sfwd4"
Sep 30 09:51:37 crc kubenswrapper[4730]: I0930 09:51:37.937335 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hw7wq\" (UniqueName: \"kubernetes.io/projected/a3cec9a5-601c-4ec9-a147-88a53c19de69-kube-api-access-hw7wq\") pod \"machine-config-controller-84d6567774-9ckzv\" (UID: \"a3cec9a5-601c-4ec9-a147-88a53c19de69\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9ckzv"
Sep 30 09:51:37 crc kubenswrapper[4730]: I0930 09:51:37.937352 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/3f90d05f-7820-4af0-8894-6d63dc672f33-installation-pull-secrets\") pod \"image-registry-697d97f7c8-xdzzr\" (UID: \"3f90d05f-7820-4af0-8894-6d63dc672f33\") " pod="openshift-image-registry/image-registry-697d97f7c8-xdzzr"
Sep 30 09:51:37 crc kubenswrapper[4730]: I0930 09:51:37.937368 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/21ce9670-1b8d-4896-a33b-b6dc5125dbdc-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-9x2qr\" (UID: \"21ce9670-1b8d-4896-a33b-b6dc5125dbdc\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-9x2qr"
Sep 30 09:51:37 crc kubenswrapper[4730]: I0930 09:51:37.937409 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xdzzr\" (UID: \"3f90d05f-7820-4af0-8894-6d63dc672f33\") " pod="openshift-image-registry/image-registry-697d97f7c8-xdzzr"
Sep 30 09:51:37 crc kubenswrapper[4730]: I0930 09:51:37.937450 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dlrsw\" (UniqueName: \"kubernetes.io/projected/27d3e752-f501-41c6-aed5-aa0e58103802-kube-api-access-dlrsw\") pod \"multus-admission-controller-857f4d67dd-xbf9b\" (UID: \"27d3e752-f501-41c6-aed5-aa0e58103802\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-xbf9b"
Sep 30 09:51:37 crc kubenswrapper[4730]: I0930 09:51:37.937479 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b437c48c-f825-4a51-9076-acf5dcd25e36-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-nt7nq\" (UID: \"b437c48c-f825-4a51-9076-acf5dcd25e36\") " pod="openshift-marketplace/marketplace-operator-79b997595-nt7nq"
Sep 30 09:51:37 crc kubenswrapper[4730]: I0930 09:51:37.937510 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/3f90d05f-7820-4af0-8894-6d63dc672f33-trusted-ca\") pod \"image-registry-697d97f7c8-xdzzr\" (UID: \"3f90d05f-7820-4af0-8894-6d63dc672f33\") " pod="openshift-image-registry/image-registry-697d97f7c8-xdzzr"
Sep 30 09:51:37 crc kubenswrapper[4730]: I0930 09:51:37.937527 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/3f90d05f-7820-4af0-8894-6d63dc672f33-bound-sa-token\") pod \"image-registry-697d97f7c8-xdzzr\" (UID: \"3f90d05f-7820-4af0-8894-6d63dc672f33\") " pod="openshift-image-registry/image-registry-697d97f7c8-xdzzr"
Sep 30 09:51:37 crc kubenswrapper[4730]: I0930 09:51:37.937573 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cv44v\" (UniqueName: \"kubernetes.io/projected/555ceaf9-a5fa-41b9-ba91-92bd420e0c1b-kube-api-access-cv44v\") pod \"etcd-operator-b45778765-cxj9q\" (UID: \"555ceaf9-a5fa-41b9-ba91-92bd420e0c1b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-cxj9q"
Sep 30 09:51:37 crc kubenswrapper[4730]: I0930 09:51:37.938861 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-7sbbv"
Sep 30 09:51:37 crc kubenswrapper[4730]: I0930 09:51:37.941869 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-zzmfv"
Sep 30 09:51:37 crc kubenswrapper[4730]: E0930 09:51:37.942176 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 09:51:38.442159115 +0000 UTC m=+142.775419108 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xdzzr" (UID: "3f90d05f-7820-4af0-8894-6d63dc672f33") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 09:51:37 crc kubenswrapper[4730]: I0930 09:51:37.967886 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-brcnh"
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.012471 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-bkbql"]
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.034329 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-75wjk"
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.039574 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 09:51:38 crc kubenswrapper[4730]: E0930 09:51:38.039793 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 09:51:38.539775742 +0000 UTC m=+142.873035725 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.040034 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b437c48c-f825-4a51-9076-acf5dcd25e36-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-nt7nq\" (UID: \"b437c48c-f825-4a51-9076-acf5dcd25e36\") " pod="openshift-marketplace/marketplace-operator-79b997595-nt7nq"
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.041521 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/555ceaf9-a5fa-41b9-ba91-92bd420e0c1b-serving-cert\") pod \"etcd-operator-b45778765-cxj9q\" (UID: \"555ceaf9-a5fa-41b9-ba91-92bd420e0c1b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-cxj9q"
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.041713 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b601ae3e-520b-43c1-8c4d-3946b0fb7cff-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-sfwd4\" (UID: \"b601ae3e-520b-43c1-8c4d-3946b0fb7cff\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-sfwd4"
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.041818 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/c7fc44ff-ad31-4a42-9946-a3285b56bafb-proxy-tls\") pod \"machine-config-operator-74547568cd-vgdj9\" (UID: \"c7fc44ff-ad31-4a42-9946-a3285b56bafb\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-vgdj9"
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.041903 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9v9vq\" (UniqueName: \"kubernetes.io/projected/c7fc44ff-ad31-4a42-9946-a3285b56bafb-kube-api-access-9v9vq\") pod \"machine-config-operator-74547568cd-vgdj9\" (UID: \"c7fc44ff-ad31-4a42-9946-a3285b56bafb\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-vgdj9"
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.042013 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/16c0279b-a0e3-4400-be1a-c485c8ea0a34-service-ca-bundle\") pod \"router-default-5444994796-m2sph\" (UID: \"16c0279b-a0e3-4400-be1a-c485c8ea0a34\") " pod="openshift-ingress/router-default-5444994796-m2sph"
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.042095 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/21ce9670-1b8d-4896-a33b-b6dc5125dbdc-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-9x2qr\" (UID: \"21ce9670-1b8d-4896-a33b-b6dc5125dbdc\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-9x2qr"
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.042196 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/21ce9670-1b8d-4896-a33b-b6dc5125dbdc-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-9x2qr\" (UID: \"21ce9670-1b8d-4896-a33b-b6dc5125dbdc\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-9x2qr"
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.043007 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/17099759-4076-40ae-b158-b00e2d947421-node-bootstrap-token\") pod \"machine-config-server-cv6jk\" (UID: \"17099759-4076-40ae-b158-b00e2d947421\") " pod="openshift-machine-config-operator/machine-config-server-cv6jk"
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.046781 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f28s2\" (UniqueName: \"kubernetes.io/projected/9297e80b-31ef-46af-98dc-8770fb66a889-kube-api-access-f28s2\") pod \"migrator-59844c95c7-v86bf\" (UID: \"9297e80b-31ef-46af-98dc-8770fb66a889\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-v86bf"
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.047129 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/175ee2dd-3d4e-4554-b13b-de02e542c8a3-signing-key\") pod \"service-ca-9c57cc56f-ssdbb\" (UID: \"175ee2dd-3d4e-4554-b13b-de02e542c8a3\") " pod="openshift-service-ca/service-ca-9c57cc56f-ssdbb"
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.047235 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j45x8\" (UniqueName: \"kubernetes.io/projected/f1f19ecd-c701-4a85-85a4-05f74f6d6f60-kube-api-access-j45x8\") pod \"csi-hostpathplugin-jj6x6\" (UID: \"f1f19ecd-c701-4a85-85a4-05f74f6d6f60\") " pod="hostpath-provisioner/csi-hostpathplugin-jj6x6"
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.047324 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f427l\" (UniqueName: \"kubernetes.io/projected/778ecda6-96a8-46f1-89e5-79c372bfc123-kube-api-access-f427l\") pod \"kube-storage-version-migrator-operator-b67b599dd-jkmzt\" (UID: \"778ecda6-96a8-46f1-89e5-79c372bfc123\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-jkmzt"
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.047412 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/f1f19ecd-c701-4a85-85a4-05f74f6d6f60-csi-data-dir\") pod \"csi-hostpathplugin-jj6x6\" (UID: \"f1f19ecd-c701-4a85-85a4-05f74f6d6f60\") " pod="hostpath-provisioner/csi-hostpathplugin-jj6x6"
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.047494 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/555ceaf9-a5fa-41b9-ba91-92bd420e0c1b-etcd-service-ca\") pod \"etcd-operator-b45778765-cxj9q\" (UID: \"555ceaf9-a5fa-41b9-ba91-92bd420e0c1b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-cxj9q"
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.047569 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qbsdf\" (UniqueName: \"kubernetes.io/projected/4658f792-b8d4-4b22-92b7-46f6c8944eba-kube-api-access-qbsdf\") pod \"dns-default-r8pgh\" (UID: \"4658f792-b8d4-4b22-92b7-46f6c8944eba\") " pod="openshift-dns/dns-default-r8pgh"
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.047660 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/555ceaf9-a5fa-41b9-ba91-92bd420e0c1b-config\") pod \"etcd-operator-b45778765-cxj9q\" (UID: \"555ceaf9-a5fa-41b9-ba91-92bd420e0c1b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-cxj9q"
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.047736 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cdca31a9-3f6d-402a-9a77-2e5156016199-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-v6vtc\" (UID: \"cdca31a9-3f6d-402a-9a77-2e5156016199\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-v6vtc"
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.047817 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rck22\" (UniqueName: \"kubernetes.io/projected/435a6f87-b787-4c31-b41f-8013e1aaae11-kube-api-access-rck22\") pod \"package-server-manager-789f6589d5-wqstr\" (UID: \"435a6f87-b787-4c31-b41f-8013e1aaae11\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-wqstr"
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.047985 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/47a443f7-7d61-447b-b119-29dcd51b1b18-secret-volume\") pod \"collect-profiles-29320425-gwwdz\" (UID: \"47a443f7-7d61-447b-b119-29dcd51b1b18\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320425-gwwdz"
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.048112 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/f1f19ecd-c701-4a85-85a4-05f74f6d6f60-registration-dir\") pod \"csi-hostpathplugin-jj6x6\" (UID: \"f1f19ecd-c701-4a85-85a4-05f74f6d6f60\") " pod="hostpath-provisioner/csi-hostpathplugin-jj6x6"
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.048288 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/3f90d05f-7820-4af0-8894-6d63dc672f33-registry-tls\") pod \"image-registry-697d97f7c8-xdzzr\" (UID: \"3f90d05f-7820-4af0-8894-6d63dc672f33\") " pod="openshift-image-registry/image-registry-697d97f7c8-xdzzr"
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.048363 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/f1f19ecd-c701-4a85-85a4-05f74f6d6f60-socket-dir\") pod \"csi-hostpathplugin-jj6x6\" (UID: \"f1f19ecd-c701-4a85-85a4-05f74f6d6f60\") " pod="hostpath-provisioner/csi-hostpathplugin-jj6x6"
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.048393 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e15284e8-6f97-47e8-a08b-ad3d3a87de94-serving-cert\") pod \"service-ca-operator-777779d784-f4hwj\" (UID: \"e15284e8-6f97-47e8-a08b-ad3d3a87de94\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-f4hwj"
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.048428 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/f1f19ecd-c701-4a85-85a4-05f74f6d6f60-mountpoint-dir\") pod \"csi-hostpathplugin-jj6x6\" (UID: \"f1f19ecd-c701-4a85-85a4-05f74f6d6f60\") " pod="hostpath-provisioner/csi-hostpathplugin-jj6x6"
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.048458 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/435a6f87-b787-4c31-b41f-8013e1aaae11-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-wqstr\" (UID: \"435a6f87-b787-4c31-b41f-8013e1aaae11\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-wqstr"
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.048501 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/a3cec9a5-601c-4ec9-a147-88a53c19de69-proxy-tls\") pod \"machine-config-controller-84d6567774-9ckzv\" (UID: \"a3cec9a5-601c-4ec9-a147-88a53c19de69\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9ckzv"
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.048552 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/555ceaf9-a5fa-41b9-ba91-92bd420e0c1b-etcd-ca\") pod \"etcd-operator-b45778765-cxj9q\" (UID: \"555ceaf9-a5fa-41b9-ba91-92bd420e0c1b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-cxj9q"
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.048577 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/c7fc44ff-ad31-4a42-9946-a3285b56bafb-auth-proxy-config\") pod \"machine-config-operator-74547568cd-vgdj9\" (UID: \"c7fc44ff-ad31-4a42-9946-a3285b56bafb\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-vgdj9"
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.048628 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/4658f792-b8d4-4b22-92b7-46f6c8944eba-metrics-tls\") pod \"dns-default-r8pgh\" (UID: \"4658f792-b8d4-4b22-92b7-46f6c8944eba\") " pod="openshift-dns/dns-default-r8pgh"
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.069899 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/c7fc44ff-ad31-4a42-9946-a3285b56bafb-images\") pod \"machine-config-operator-74547568cd-vgdj9\" (UID: \"c7fc44ff-ad31-4a42-9946-a3285b56bafb\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-vgdj9"
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.069977 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/3f90d05f-7820-4af0-8894-6d63dc672f33-registry-certificates\") pod \"image-registry-697d97f7c8-xdzzr\" (UID: \"3f90d05f-7820-4af0-8894-6d63dc672f33\") " pod="openshift-image-registry/image-registry-697d97f7c8-xdzzr"
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.070011 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cdca31a9-3f6d-402a-9a77-2e5156016199-config\") pod \"kube-controller-manager-operator-78b949d7b-v6vtc\" (UID: \"cdca31a9-3f6d-402a-9a77-2e5156016199\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-v6vtc"
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.070050 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c9jlw\" (UniqueName: \"kubernetes.io/projected/800a0b26-7e9b-458b-95a9-703c7ace905f-kube-api-access-c9jlw\") pod \"dns-operator-744455d44c-cjsxd\" (UID: \"800a0b26-7e9b-458b-95a9-703c7ace905f\") " pod="openshift-dns-operator/dns-operator-744455d44c-cjsxd"
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.070143 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/3f90d05f-7820-4af0-8894-6d63dc672f33-ca-trust-extracted\") pod \"image-registry-697d97f7c8-xdzzr\" (UID: \"3f90d05f-7820-4af0-8894-6d63dc672f33\") " pod="openshift-image-registry/image-registry-697d97f7c8-xdzzr"
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.070171 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jxjhj\" (UniqueName: \"kubernetes.io/projected/c8aaf03c-1a0b-4346-99be-5367b88685bd-kube-api-access-jxjhj\") pod \"packageserver-d55dfcdfc-b829k\" (UID: \"c8aaf03c-1a0b-4346-99be-5367b88685bd\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-b829k"
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.070282 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cx7zs\" (UniqueName: \"kubernetes.io/projected/b437c48c-f825-4a51-9076-acf5dcd25e36-kube-api-access-cx7zs\") pod \"marketplace-operator-79b997595-nt7nq\" (UID: \"b437c48c-f825-4a51-9076-acf5dcd25e36\") " pod="openshift-marketplace/marketplace-operator-79b997595-nt7nq"
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.070311 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4658f792-b8d4-4b22-92b7-46f6c8944eba-config-volume\") pod \"dns-default-r8pgh\" (UID: \"4658f792-b8d4-4b22-92b7-46f6c8944eba\") " pod="openshift-dns/dns-default-r8pgh"
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.070337 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/ba9ae093-0fce-4e5d-a1c9-f199cb2ba008-profile-collector-cert\") pod \"olm-operator-6b444d44fb-cffv2\" (UID: \"ba9ae093-0fce-4e5d-a1c9-f199cb2ba008\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-cffv2"
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.070409 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fvcwd\" (UniqueName: \"kubernetes.io/projected/e15284e8-6f97-47e8-a08b-ad3d3a87de94-kube-api-access-fvcwd\") pod \"service-ca-operator-777779d784-f4hwj\" (UID: \"e15284e8-6f97-47e8-a08b-ad3d3a87de94\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-f4hwj"
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.070463 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/175ee2dd-3d4e-4554-b13b-de02e542c8a3-signing-cabundle\") pod \"service-ca-9c57cc56f-ssdbb\" (UID: \"175ee2dd-3d4e-4554-b13b-de02e542c8a3\") " pod="openshift-service-ca/service-ca-9c57cc56f-ssdbb"
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.070522 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/16c0279b-a0e3-4400-be1a-c485c8ea0a34-metrics-certs\") pod \"router-default-5444994796-m2sph\" (UID: \"16c0279b-a0e3-4400-be1a-c485c8ea0a34\") " pod="openshift-ingress/router-default-5444994796-m2sph"
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.070547 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8z9zr\" (UniqueName: \"kubernetes.io/projected/175ee2dd-3d4e-4554-b13b-de02e542c8a3-kube-api-access-8z9zr\") pod \"service-ca-9c57cc56f-ssdbb\" (UID: \"175ee2dd-3d4e-4554-b13b-de02e542c8a3\") " pod="openshift-service-ca/service-ca-9c57cc56f-ssdbb"
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.070594 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rzwzh\" (UniqueName: \"kubernetes.io/projected/3f90d05f-7820-4af0-8894-6d63dc672f33-kube-api-access-rzwzh\") pod \"image-registry-697d97f7c8-xdzzr\" (UID: \"3f90d05f-7820-4af0-8894-6d63dc672f33\") " pod="openshift-image-registry/image-registry-697d97f7c8-xdzzr"
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.070639 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/eb3285d0-7f84-46d0-9c21-136e077b813a-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-27l2q\" (UID: \"eb3285d0-7f84-46d0-9c21-136e077b813a\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-27l2q"
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.070680 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z88m2\" (UniqueName: \"kubernetes.io/projected/ba9ae093-0fce-4e5d-a1c9-f199cb2ba008-kube-api-access-z88m2\") pod \"olm-operator-6b444d44fb-cffv2\" (UID: \"ba9ae093-0fce-4e5d-a1c9-f199cb2ba008\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-cffv2"
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.070976 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/778ecda6-96a8-46f1-89e5-79c372bfc123-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-jkmzt\" (UID: \"778ecda6-96a8-46f1-89e5-79c372bfc123\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-jkmzt"
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.071029 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/cdca31a9-3f6d-402a-9a77-2e5156016199-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-v6vtc\" (UID: \"cdca31a9-3f6d-402a-9a77-2e5156016199\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-v6vtc"
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.071049 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bwgbw\" (UniqueName: \"kubernetes.io/projected/eb3285d0-7f84-46d0-9c21-136e077b813a-kube-api-access-bwgbw\") pod \"control-plane-machine-set-operator-78cbb6b69f-27l2q\" (UID: \"eb3285d0-7f84-46d0-9c21-136e077b813a\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-27l2q"
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.071071 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/555ceaf9-a5fa-41b9-ba91-92bd420e0c1b-etcd-client\") pod \"etcd-operator-b45778765-cxj9q\" (UID: \"555ceaf9-a5fa-41b9-ba91-92bd420e0c1b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-cxj9q"
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.071119 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g9qrg\" (UniqueName: \"kubernetes.io/projected/16c0279b-a0e3-4400-be1a-c485c8ea0a34-kube-api-access-g9qrg\") pod \"router-default-5444994796-m2sph\" (UID: \"16c0279b-a0e3-4400-be1a-c485c8ea0a34\") " pod="openshift-ingress/router-default-5444994796-m2sph"
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.071143 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/a3cec9a5-601c-4ec9-a147-88a53c19de69-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-9ckzv\" (UID: \"a3cec9a5-601c-4ec9-a147-88a53c19de69\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9ckzv"
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.071197 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b601ae3e-520b-43c1-8c4d-3946b0fb7cff-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-sfwd4\" (UID: \"b601ae3e-520b-43c1-8c4d-3946b0fb7cff\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-sfwd4"
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.071218 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/27d3e752-f501-41c6-aed5-aa0e58103802-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-xbf9b\" (UID: \"27d3e752-f501-41c6-aed5-aa0e58103802\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-xbf9b"
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.071261 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/778ecda6-96a8-46f1-89e5-79c372bfc123-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-jkmzt\" (UID: \"778ecda6-96a8-46f1-89e5-79c372bfc123\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-jkmzt"
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.071284 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/17099759-4076-40ae-b158-b00e2d947421-certs\") pod \"machine-config-server-cv6jk\" (UID: \"17099759-4076-40ae-b158-b00e2d947421\") " pod="openshift-machine-config-operator/machine-config-server-cv6jk"
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.071306 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/16c0279b-a0e3-4400-be1a-c485c8ea0a34-stats-auth\") pod \"router-default-5444994796-m2sph\" (UID: \"16c0279b-a0e3-4400-be1a-c485c8ea0a34\") " pod="openshift-ingress/router-default-5444994796-m2sph"
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.071345 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/c8aaf03c-1a0b-4346-99be-5367b88685bd-tmpfs\") pod \"packageserver-d55dfcdfc-b829k\" (UID: \"c8aaf03c-1a0b-4346-99be-5367b88685bd\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-b829k"
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.071366 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/16c0279b-a0e3-4400-be1a-c485c8ea0a34-default-certificate\") pod \"router-default-5444994796-m2sph\" (UID: \"16c0279b-a0e3-4400-be1a-c485c8ea0a34\") " pod="openshift-ingress/router-default-5444994796-m2sph"
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.048749 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/16c0279b-a0e3-4400-be1a-c485c8ea0a34-service-ca-bundle\") pod \"router-default-5444994796-m2sph\" (UID: \"16c0279b-a0e3-4400-be1a-c485c8ea0a34\") " pod="openshift-ingress/router-default-5444994796-m2sph"
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.049902 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/555ceaf9-a5fa-41b9-ba91-92bd420e0c1b-config\") pod \"etcd-operator-b45778765-cxj9q\" (UID: \"555ceaf9-a5fa-41b9-ba91-92bd420e0c1b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-cxj9q"
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.051856 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/555ceaf9-a5fa-41b9-ba91-92bd420e0c1b-serving-cert\") pod \"etcd-operator-b45778765-cxj9q\" (UID: \"555ceaf9-a5fa-41b9-ba91-92bd420e0c1b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-cxj9q"
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.071436 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b601ae3e-520b-43c1-8c4d-3946b0fb7cff-config\") pod \"kube-apiserver-operator-766d6c64bb-sfwd4\" (UID: \"b601ae3e-520b-43c1-8c4d-3946b0fb7cff\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-sfwd4"
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.054679 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/555ceaf9-a5fa-41b9-ba91-92bd420e0c1b-etcd-service-ca\") pod \"etcd-operator-b45778765-cxj9q\" (UID: \"555ceaf9-a5fa-41b9-ba91-92bd420e0c1b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-cxj9q"
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.054946 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/555ceaf9-a5fa-41b9-ba91-92bd420e0c1b-etcd-ca\") pod \"etcd-operator-b45778765-cxj9q\" (UID: \"555ceaf9-a5fa-41b9-ba91-92bd420e0c1b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-cxj9q"
"MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b601ae3e-520b-43c1-8c4d-3946b0fb7cff-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-sfwd4\" (UID: \"b601ae3e-520b-43c1-8c4d-3946b0fb7cff\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-sfwd4" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.058929 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/435a6f87-b787-4c31-b41f-8013e1aaae11-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-wqstr\" (UID: \"435a6f87-b787-4c31-b41f-8013e1aaae11\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-wqstr" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.059218 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/3f90d05f-7820-4af0-8894-6d63dc672f33-registry-tls\") pod \"image-registry-697d97f7c8-xdzzr\" (UID: \"3f90d05f-7820-4af0-8894-6d63dc672f33\") " pod="openshift-image-registry/image-registry-697d97f7c8-xdzzr" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.059362 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/21ce9670-1b8d-4896-a33b-b6dc5125dbdc-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-9x2qr\" (UID: \"21ce9670-1b8d-4896-a33b-b6dc5125dbdc\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-9x2qr" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.064156 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/a3cec9a5-601c-4ec9-a147-88a53c19de69-proxy-tls\") pod \"machine-config-controller-84d6567774-9ckzv\" (UID: \"a3cec9a5-601c-4ec9-a147-88a53c19de69\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9ckzv" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.073732 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/3f90d05f-7820-4af0-8894-6d63dc672f33-registry-certificates\") pod \"image-registry-697d97f7c8-xdzzr\" (UID: \"3f90d05f-7820-4af0-8894-6d63dc672f33\") " pod="openshift-image-registry/image-registry-697d97f7c8-xdzzr" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.074416 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b437c48c-f825-4a51-9076-acf5dcd25e36-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-nt7nq\" (UID: \"b437c48c-f825-4a51-9076-acf5dcd25e36\") " pod="openshift-marketplace/marketplace-operator-79b997595-nt7nq" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.074455 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hw7wq\" (UniqueName: \"kubernetes.io/projected/a3cec9a5-601c-4ec9-a147-88a53c19de69-kube-api-access-hw7wq\") pod \"machine-config-controller-84d6567774-9ckzv\" (UID: \"a3cec9a5-601c-4ec9-a147-88a53c19de69\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9ckzv" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.074479 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: 
\"kubernetes.io/host-path/f1f19ecd-c701-4a85-85a4-05f74f6d6f60-plugins-dir\") pod \"csi-hostpathplugin-jj6x6\" (UID: \"f1f19ecd-c701-4a85-85a4-05f74f6d6f60\") " pod="hostpath-provisioner/csi-hostpathplugin-jj6x6" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.074520 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/ba9ae093-0fce-4e5d-a1c9-f199cb2ba008-srv-cert\") pod \"olm-operator-6b444d44fb-cffv2\" (UID: \"ba9ae093-0fce-4e5d-a1c9-f199cb2ba008\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-cffv2" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.074551 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/3f90d05f-7820-4af0-8894-6d63dc672f33-installation-pull-secrets\") pod \"image-registry-697d97f7c8-xdzzr\" (UID: \"3f90d05f-7820-4af0-8894-6d63dc672f33\") " pod="openshift-image-registry/image-registry-697d97f7c8-xdzzr" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.074575 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e15284e8-6f97-47e8-a08b-ad3d3a87de94-config\") pod \"service-ca-operator-777779d784-f4hwj\" (UID: \"e15284e8-6f97-47e8-a08b-ad3d3a87de94\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-f4hwj" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.074620 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xdzzr\" (UID: \"3f90d05f-7820-4af0-8894-6d63dc672f33\") " pod="openshift-image-registry/image-registry-697d97f7c8-xdzzr" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.074643 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/21ce9670-1b8d-4896-a33b-b6dc5125dbdc-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-9x2qr\" (UID: \"21ce9670-1b8d-4896-a33b-b6dc5125dbdc\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-9x2qr" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.074665 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hzx9h\" (UniqueName: \"kubernetes.io/projected/d3e12ca7-c78f-491d-bca2-3af653c9bf98-kube-api-access-hzx9h\") pod \"ingress-canary-2psv2\" (UID: \"d3e12ca7-c78f-491d-bca2-3af653c9bf98\") " pod="openshift-ingress-canary/ingress-canary-2psv2" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.074687 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/d3e12ca7-c78f-491d-bca2-3af653c9bf98-cert\") pod \"ingress-canary-2psv2\" (UID: \"d3e12ca7-c78f-491d-bca2-3af653c9bf98\") " pod="openshift-ingress-canary/ingress-canary-2psv2" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.074746 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dlrsw\" (UniqueName: \"kubernetes.io/projected/27d3e752-f501-41c6-aed5-aa0e58103802-kube-api-access-dlrsw\") pod \"multus-admission-controller-857f4d67dd-xbf9b\" (UID: 
\"27d3e752-f501-41c6-aed5-aa0e58103802\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-xbf9b" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.074766 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/47a443f7-7d61-447b-b119-29dcd51b1b18-config-volume\") pod \"collect-profiles-29320425-gwwdz\" (UID: \"47a443f7-7d61-447b-b119-29dcd51b1b18\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320425-gwwdz" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.075306 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cdca31a9-3f6d-402a-9a77-2e5156016199-config\") pod \"kube-controller-manager-operator-78b949d7b-v6vtc\" (UID: \"cdca31a9-3f6d-402a-9a77-2e5156016199\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-v6vtc" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.076284 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b601ae3e-520b-43c1-8c4d-3946b0fb7cff-config\") pod \"kube-apiserver-operator-766d6c64bb-sfwd4\" (UID: \"b601ae3e-520b-43c1-8c4d-3946b0fb7cff\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-sfwd4" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.076298 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/778ecda6-96a8-46f1-89e5-79c372bfc123-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-jkmzt\" (UID: \"778ecda6-96a8-46f1-89e5-79c372bfc123\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-jkmzt" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.076530 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/3f90d05f-7820-4af0-8894-6d63dc672f33-ca-trust-extracted\") pod \"image-registry-697d97f7c8-xdzzr\" (UID: \"3f90d05f-7820-4af0-8894-6d63dc672f33\") " pod="openshift-image-registry/image-registry-697d97f7c8-xdzzr" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.078884 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-gf4lh" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.079738 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/16c0279b-a0e3-4400-be1a-c485c8ea0a34-stats-auth\") pod \"router-default-5444994796-m2sph\" (UID: \"16c0279b-a0e3-4400-be1a-c485c8ea0a34\") " pod="openshift-ingress/router-default-5444994796-m2sph" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.083344 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/555ceaf9-a5fa-41b9-ba91-92bd420e0c1b-etcd-client\") pod \"etcd-operator-b45778765-cxj9q\" (UID: \"555ceaf9-a5fa-41b9-ba91-92bd420e0c1b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-cxj9q" Sep 30 09:51:38 crc kubenswrapper[4730]: E0930 09:51:38.085185 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2025-09-30 09:51:38.585142081 +0000 UTC m=+142.918402074 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xdzzr" (UID: "3f90d05f-7820-4af0-8894-6d63dc672f33") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.086182 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/3f90d05f-7820-4af0-8894-6d63dc672f33-trusted-ca\") pod \"image-registry-697d97f7c8-xdzzr\" (UID: \"3f90d05f-7820-4af0-8894-6d63dc672f33\") " pod="openshift-image-registry/image-registry-697d97f7c8-xdzzr" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.086226 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/3f90d05f-7820-4af0-8894-6d63dc672f33-bound-sa-token\") pod \"image-registry-697d97f7c8-xdzzr\" (UID: \"3f90d05f-7820-4af0-8894-6d63dc672f33\") " pod="openshift-image-registry/image-registry-697d97f7c8-xdzzr" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.086264 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b437c48c-f825-4a51-9076-acf5dcd25e36-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-nt7nq\" (UID: \"b437c48c-f825-4a51-9076-acf5dcd25e36\") " pod="openshift-marketplace/marketplace-operator-79b997595-nt7nq" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.086301 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/c8aaf03c-1a0b-4346-99be-5367b88685bd-webhook-cert\") pod \"packageserver-d55dfcdfc-b829k\" (UID: \"c8aaf03c-1a0b-4346-99be-5367b88685bd\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-b829k" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.087954 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/a3cec9a5-601c-4ec9-a147-88a53c19de69-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-9ckzv\" (UID: \"a3cec9a5-601c-4ec9-a147-88a53c19de69\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9ckzv" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.088753 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/21ce9670-1b8d-4896-a33b-b6dc5125dbdc-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-9x2qr\" (UID: \"21ce9670-1b8d-4896-a33b-b6dc5125dbdc\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-9x2qr" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.089669 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/c8aaf03c-1a0b-4346-99be-5367b88685bd-apiservice-cert\") pod \"packageserver-d55dfcdfc-b829k\" (UID: \"c8aaf03c-1a0b-4346-99be-5367b88685bd\") " 
pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-b829k" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.089761 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cv44v\" (UniqueName: \"kubernetes.io/projected/555ceaf9-a5fa-41b9-ba91-92bd420e0c1b-kube-api-access-cv44v\") pod \"etcd-operator-b45778765-cxj9q\" (UID: \"555ceaf9-a5fa-41b9-ba91-92bd420e0c1b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-cxj9q" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.089803 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r56fk\" (UniqueName: \"kubernetes.io/projected/47a443f7-7d61-447b-b119-29dcd51b1b18-kube-api-access-r56fk\") pod \"collect-profiles-29320425-gwwdz\" (UID: \"47a443f7-7d61-447b-b119-29dcd51b1b18\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320425-gwwdz" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.090038 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x25cf\" (UniqueName: \"kubernetes.io/projected/17099759-4076-40ae-b158-b00e2d947421-kube-api-access-x25cf\") pod \"machine-config-server-cv6jk\" (UID: \"17099759-4076-40ae-b158-b00e2d947421\") " pod="openshift-machine-config-operator/machine-config-server-cv6jk" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.090359 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/3f90d05f-7820-4af0-8894-6d63dc672f33-trusted-ca\") pod \"image-registry-697d97f7c8-xdzzr\" (UID: \"3f90d05f-7820-4af0-8894-6d63dc672f33\") " pod="openshift-image-registry/image-registry-697d97f7c8-xdzzr" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.090498 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/800a0b26-7e9b-458b-95a9-703c7ace905f-metrics-tls\") pod \"dns-operator-744455d44c-cjsxd\" (UID: \"800a0b26-7e9b-458b-95a9-703c7ace905f\") " pod="openshift-dns-operator/dns-operator-744455d44c-cjsxd" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.091756 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b437c48c-f825-4a51-9076-acf5dcd25e36-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-nt7nq\" (UID: \"b437c48c-f825-4a51-9076-acf5dcd25e36\") " pod="openshift-marketplace/marketplace-operator-79b997595-nt7nq" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.092261 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/778ecda6-96a8-46f1-89e5-79c372bfc123-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-jkmzt\" (UID: \"778ecda6-96a8-46f1-89e5-79c372bfc123\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-jkmzt" Sep 30 09:51:38 crc kubenswrapper[4730]: W0930 09:51:38.092412 4730 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod584babfe_a3c2_49da_97ac_c3a9fc90f102.slice/crio-be3565587373a8bc0a0ff27cf8419bc46956c1b34a5958a20cc4de44745f2080 WatchSource:0}: Error finding container be3565587373a8bc0a0ff27cf8419bc46956c1b34a5958a20cc4de44745f2080: Status 404 returned error can't find the container with id 
be3565587373a8bc0a0ff27cf8419bc46956c1b34a5958a20cc4de44745f2080 Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.096090 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/800a0b26-7e9b-458b-95a9-703c7ace905f-metrics-tls\") pod \"dns-operator-744455d44c-cjsxd\" (UID: \"800a0b26-7e9b-458b-95a9-703c7ace905f\") " pod="openshift-dns-operator/dns-operator-744455d44c-cjsxd" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.096389 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/16c0279b-a0e3-4400-be1a-c485c8ea0a34-default-certificate\") pod \"router-default-5444994796-m2sph\" (UID: \"16c0279b-a0e3-4400-be1a-c485c8ea0a34\") " pod="openshift-ingress/router-default-5444994796-m2sph" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.096818 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/27d3e752-f501-41c6-aed5-aa0e58103802-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-xbf9b\" (UID: \"27d3e752-f501-41c6-aed5-aa0e58103802\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-xbf9b" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.099420 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-r86xc"] Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.100023 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/21ce9670-1b8d-4896-a33b-b6dc5125dbdc-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-9x2qr\" (UID: \"21ce9670-1b8d-4896-a33b-b6dc5125dbdc\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-9x2qr" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.099982 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/3f90d05f-7820-4af0-8894-6d63dc672f33-installation-pull-secrets\") pod \"image-registry-697d97f7c8-xdzzr\" (UID: \"3f90d05f-7820-4af0-8894-6d63dc672f33\") " pod="openshift-image-registry/image-registry-697d97f7c8-xdzzr" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.100399 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/16c0279b-a0e3-4400-be1a-c485c8ea0a34-metrics-certs\") pod \"router-default-5444994796-m2sph\" (UID: \"16c0279b-a0e3-4400-be1a-c485c8ea0a34\") " pod="openshift-ingress/router-default-5444994796-m2sph" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.103277 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cdca31a9-3f6d-402a-9a77-2e5156016199-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-v6vtc\" (UID: \"cdca31a9-3f6d-402a-9a77-2e5156016199\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-v6vtc" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.115232 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f28s2\" (UniqueName: \"kubernetes.io/projected/9297e80b-31ef-46af-98dc-8770fb66a889-kube-api-access-f28s2\") pod \"migrator-59844c95c7-v86bf\" (UID: \"9297e80b-31ef-46af-98dc-8770fb66a889\") " 
pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-v86bf" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.122472 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-cljtg" event={"ID":"f17dee56-592e-45e7-8c4f-80854757d254","Type":"ContainerStarted","Data":"62f11e9dd36c905303831657d65e23490b04c6ff4753450b5fac2d04c02f21e2"} Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.122557 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-cljtg" event={"ID":"f17dee56-592e-45e7-8c4f-80854757d254","Type":"ContainerStarted","Data":"e8e499b6f69810484c9872afcb0112e8fb5b2f5bf10c0a579fb9ed0c69e8a39e"} Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.123375 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-cljtg" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.124719 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-bkbql" event={"ID":"584babfe-a3c2-49da-97ac-c3a9fc90f102","Type":"ContainerStarted","Data":"be3565587373a8bc0a0ff27cf8419bc46956c1b34a5958a20cc4de44745f2080"} Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.131577 4730 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-cljtg container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.12:8443/healthz\": dial tcp 10.217.0.12:8443: connect: connection refused" start-of-body= Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.131669 4730 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-cljtg" podUID="f17dee56-592e-45e7-8c4f-80854757d254" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.12:8443/healthz\": dial tcp 10.217.0.12:8443: connect: connection refused" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.155396 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f427l\" (UniqueName: \"kubernetes.io/projected/778ecda6-96a8-46f1-89e5-79c372bfc123-kube-api-access-f427l\") pod \"kube-storage-version-migrator-operator-b67b599dd-jkmzt\" (UID: \"778ecda6-96a8-46f1-89e5-79c372bfc123\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-jkmzt" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.155658 4730 generic.go:334] "Generic (PLEG): container finished" podID="dff327ac-2d87-47ce-ae92-f6e001b9d876" containerID="1c0a8019eb1576ca99c9e8e157295639dc1cf1a45fde2c4483a88cb4d1f4f0dc" exitCode=0 Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.155751 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vxqtl" event={"ID":"dff327ac-2d87-47ce-ae92-f6e001b9d876","Type":"ContainerDied","Data":"1c0a8019eb1576ca99c9e8e157295639dc1cf1a45fde2c4483a88cb4d1f4f0dc"} Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.155791 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vxqtl" event={"ID":"dff327ac-2d87-47ce-ae92-f6e001b9d876","Type":"ContainerStarted","Data":"abdfbde8090198eb0294d8e32f033b1689ca2634849d5971aea73696442f984e"} Sep 30 09:51:38 
crc kubenswrapper[4730]: I0930 09:51:38.171754 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rck22\" (UniqueName: \"kubernetes.io/projected/435a6f87-b787-4c31-b41f-8013e1aaae11-kube-api-access-rck22\") pod \"package-server-manager-789f6589d5-wqstr\" (UID: \"435a6f87-b787-4c31-b41f-8013e1aaae11\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-wqstr" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.174256 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-r8tph"] Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.180393 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rzwzh\" (UniqueName: \"kubernetes.io/projected/3f90d05f-7820-4af0-8894-6d63dc672f33-kube-api-access-rzwzh\") pod \"image-registry-697d97f7c8-xdzzr\" (UID: \"3f90d05f-7820-4af0-8894-6d63dc672f33\") " pod="openshift-image-registry/image-registry-697d97f7c8-xdzzr" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.181708 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-mpfz6" event={"ID":"77e50cc1-ad3e-4d41-ac6a-0bf534f94b8c","Type":"ContainerStarted","Data":"216f9174f1eed9cb0126c66cba9b4ae6e4ee61667d643d07b29008d8d29a9f1d"} Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.181828 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-mpfz6" event={"ID":"77e50cc1-ad3e-4d41-ac6a-0bf534f94b8c","Type":"ContainerStarted","Data":"f947ed6bdcabbcd4f7d638399056aeb787da5ba191eb224e22eca3e2541e536a"} Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.184116 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-mpfz6" event={"ID":"77e50cc1-ad3e-4d41-ac6a-0bf534f94b8c","Type":"ContainerStarted","Data":"22c9cd21049c295af9c78e352d5ebca84f8804d3f54cd809b76b9981001fccae"} Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.192888 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.193153 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4658f792-b8d4-4b22-92b7-46f6c8944eba-config-volume\") pod \"dns-default-r8pgh\" (UID: \"4658f792-b8d4-4b22-92b7-46f6c8944eba\") " pod="openshift-dns/dns-default-r8pgh" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.193189 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/ba9ae093-0fce-4e5d-a1c9-f199cb2ba008-profile-collector-cert\") pod \"olm-operator-6b444d44fb-cffv2\" (UID: \"ba9ae093-0fce-4e5d-a1c9-f199cb2ba008\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-cffv2" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.193230 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fvcwd\" (UniqueName: \"kubernetes.io/projected/e15284e8-6f97-47e8-a08b-ad3d3a87de94-kube-api-access-fvcwd\") pod 
\"service-ca-operator-777779d784-f4hwj\" (UID: \"e15284e8-6f97-47e8-a08b-ad3d3a87de94\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-f4hwj" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.193267 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/175ee2dd-3d4e-4554-b13b-de02e542c8a3-signing-cabundle\") pod \"service-ca-9c57cc56f-ssdbb\" (UID: \"175ee2dd-3d4e-4554-b13b-de02e542c8a3\") " pod="openshift-service-ca/service-ca-9c57cc56f-ssdbb" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.193286 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8z9zr\" (UniqueName: \"kubernetes.io/projected/175ee2dd-3d4e-4554-b13b-de02e542c8a3-kube-api-access-8z9zr\") pod \"service-ca-9c57cc56f-ssdbb\" (UID: \"175ee2dd-3d4e-4554-b13b-de02e542c8a3\") " pod="openshift-service-ca/service-ca-9c57cc56f-ssdbb" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.193309 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/eb3285d0-7f84-46d0-9c21-136e077b813a-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-27l2q\" (UID: \"eb3285d0-7f84-46d0-9c21-136e077b813a\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-27l2q" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.193335 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z88m2\" (UniqueName: \"kubernetes.io/projected/ba9ae093-0fce-4e5d-a1c9-f199cb2ba008-kube-api-access-z88m2\") pod \"olm-operator-6b444d44fb-cffv2\" (UID: \"ba9ae093-0fce-4e5d-a1c9-f199cb2ba008\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-cffv2" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.193367 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bwgbw\" (UniqueName: \"kubernetes.io/projected/eb3285d0-7f84-46d0-9c21-136e077b813a-kube-api-access-bwgbw\") pod \"control-plane-machine-set-operator-78cbb6b69f-27l2q\" (UID: \"eb3285d0-7f84-46d0-9c21-136e077b813a\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-27l2q" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.193455 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/17099759-4076-40ae-b158-b00e2d947421-certs\") pod \"machine-config-server-cv6jk\" (UID: \"17099759-4076-40ae-b158-b00e2d947421\") " pod="openshift-machine-config-operator/machine-config-server-cv6jk" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.193496 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/c8aaf03c-1a0b-4346-99be-5367b88685bd-tmpfs\") pod \"packageserver-d55dfcdfc-b829k\" (UID: \"c8aaf03c-1a0b-4346-99be-5367b88685bd\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-b829k" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.193547 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/f1f19ecd-c701-4a85-85a4-05f74f6d6f60-plugins-dir\") pod \"csi-hostpathplugin-jj6x6\" (UID: \"f1f19ecd-c701-4a85-85a4-05f74f6d6f60\") " pod="hostpath-provisioner/csi-hostpathplugin-jj6x6" Sep 30 09:51:38 crc 
kubenswrapper[4730]: I0930 09:51:38.193572 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/ba9ae093-0fce-4e5d-a1c9-f199cb2ba008-srv-cert\") pod \"olm-operator-6b444d44fb-cffv2\" (UID: \"ba9ae093-0fce-4e5d-a1c9-f199cb2ba008\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-cffv2" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.193598 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e15284e8-6f97-47e8-a08b-ad3d3a87de94-config\") pod \"service-ca-operator-777779d784-f4hwj\" (UID: \"e15284e8-6f97-47e8-a08b-ad3d3a87de94\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-f4hwj" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.193649 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hzx9h\" (UniqueName: \"kubernetes.io/projected/d3e12ca7-c78f-491d-bca2-3af653c9bf98-kube-api-access-hzx9h\") pod \"ingress-canary-2psv2\" (UID: \"d3e12ca7-c78f-491d-bca2-3af653c9bf98\") " pod="openshift-ingress-canary/ingress-canary-2psv2" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.193669 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/d3e12ca7-c78f-491d-bca2-3af653c9bf98-cert\") pod \"ingress-canary-2psv2\" (UID: \"d3e12ca7-c78f-491d-bca2-3af653c9bf98\") " pod="openshift-ingress-canary/ingress-canary-2psv2" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.193697 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/47a443f7-7d61-447b-b119-29dcd51b1b18-config-volume\") pod \"collect-profiles-29320425-gwwdz\" (UID: \"47a443f7-7d61-447b-b119-29dcd51b1b18\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320425-gwwdz" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.193720 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/c8aaf03c-1a0b-4346-99be-5367b88685bd-webhook-cert\") pod \"packageserver-d55dfcdfc-b829k\" (UID: \"c8aaf03c-1a0b-4346-99be-5367b88685bd\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-b829k" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.193736 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/c8aaf03c-1a0b-4346-99be-5367b88685bd-apiservice-cert\") pod \"packageserver-d55dfcdfc-b829k\" (UID: \"c8aaf03c-1a0b-4346-99be-5367b88685bd\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-b829k" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.193762 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r56fk\" (UniqueName: \"kubernetes.io/projected/47a443f7-7d61-447b-b119-29dcd51b1b18-kube-api-access-r56fk\") pod \"collect-profiles-29320425-gwwdz\" (UID: \"47a443f7-7d61-447b-b119-29dcd51b1b18\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320425-gwwdz" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.193782 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x25cf\" (UniqueName: \"kubernetes.io/projected/17099759-4076-40ae-b158-b00e2d947421-kube-api-access-x25cf\") pod \"machine-config-server-cv6jk\" 
(UID: \"17099759-4076-40ae-b158-b00e2d947421\") " pod="openshift-machine-config-operator/machine-config-server-cv6jk" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.193811 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/c7fc44ff-ad31-4a42-9946-a3285b56bafb-proxy-tls\") pod \"machine-config-operator-74547568cd-vgdj9\" (UID: \"c7fc44ff-ad31-4a42-9946-a3285b56bafb\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-vgdj9" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.193830 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9v9vq\" (UniqueName: \"kubernetes.io/projected/c7fc44ff-ad31-4a42-9946-a3285b56bafb-kube-api-access-9v9vq\") pod \"machine-config-operator-74547568cd-vgdj9\" (UID: \"c7fc44ff-ad31-4a42-9946-a3285b56bafb\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-vgdj9" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.193849 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/17099759-4076-40ae-b158-b00e2d947421-node-bootstrap-token\") pod \"machine-config-server-cv6jk\" (UID: \"17099759-4076-40ae-b158-b00e2d947421\") " pod="openshift-machine-config-operator/machine-config-server-cv6jk" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.193876 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/175ee2dd-3d4e-4554-b13b-de02e542c8a3-signing-key\") pod \"service-ca-9c57cc56f-ssdbb\" (UID: \"175ee2dd-3d4e-4554-b13b-de02e542c8a3\") " pod="openshift-service-ca/service-ca-9c57cc56f-ssdbb" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.193895 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j45x8\" (UniqueName: \"kubernetes.io/projected/f1f19ecd-c701-4a85-85a4-05f74f6d6f60-kube-api-access-j45x8\") pod \"csi-hostpathplugin-jj6x6\" (UID: \"f1f19ecd-c701-4a85-85a4-05f74f6d6f60\") " pod="hostpath-provisioner/csi-hostpathplugin-jj6x6" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.193929 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/f1f19ecd-c701-4a85-85a4-05f74f6d6f60-csi-data-dir\") pod \"csi-hostpathplugin-jj6x6\" (UID: \"f1f19ecd-c701-4a85-85a4-05f74f6d6f60\") " pod="hostpath-provisioner/csi-hostpathplugin-jj6x6" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.193944 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qbsdf\" (UniqueName: \"kubernetes.io/projected/4658f792-b8d4-4b22-92b7-46f6c8944eba-kube-api-access-qbsdf\") pod \"dns-default-r8pgh\" (UID: \"4658f792-b8d4-4b22-92b7-46f6c8944eba\") " pod="openshift-dns/dns-default-r8pgh" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.193969 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/47a443f7-7d61-447b-b119-29dcd51b1b18-secret-volume\") pod \"collect-profiles-29320425-gwwdz\" (UID: \"47a443f7-7d61-447b-b119-29dcd51b1b18\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320425-gwwdz" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.194001 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"registration-dir\" (UniqueName: \"kubernetes.io/host-path/f1f19ecd-c701-4a85-85a4-05f74f6d6f60-registration-dir\") pod \"csi-hostpathplugin-jj6x6\" (UID: \"f1f19ecd-c701-4a85-85a4-05f74f6d6f60\") " pod="hostpath-provisioner/csi-hostpathplugin-jj6x6" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.194026 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/f1f19ecd-c701-4a85-85a4-05f74f6d6f60-socket-dir\") pod \"csi-hostpathplugin-jj6x6\" (UID: \"f1f19ecd-c701-4a85-85a4-05f74f6d6f60\") " pod="hostpath-provisioner/csi-hostpathplugin-jj6x6" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.194043 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e15284e8-6f97-47e8-a08b-ad3d3a87de94-serving-cert\") pod \"service-ca-operator-777779d784-f4hwj\" (UID: \"e15284e8-6f97-47e8-a08b-ad3d3a87de94\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-f4hwj" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.194063 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/f1f19ecd-c701-4a85-85a4-05f74f6d6f60-mountpoint-dir\") pod \"csi-hostpathplugin-jj6x6\" (UID: \"f1f19ecd-c701-4a85-85a4-05f74f6d6f60\") " pod="hostpath-provisioner/csi-hostpathplugin-jj6x6" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.194090 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/c7fc44ff-ad31-4a42-9946-a3285b56bafb-auth-proxy-config\") pod \"machine-config-operator-74547568cd-vgdj9\" (UID: \"c7fc44ff-ad31-4a42-9946-a3285b56bafb\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-vgdj9" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.194107 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/4658f792-b8d4-4b22-92b7-46f6c8944eba-metrics-tls\") pod \"dns-default-r8pgh\" (UID: \"4658f792-b8d4-4b22-92b7-46f6c8944eba\") " pod="openshift-dns/dns-default-r8pgh" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.194121 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/c7fc44ff-ad31-4a42-9946-a3285b56bafb-images\") pod \"machine-config-operator-74547568cd-vgdj9\" (UID: \"c7fc44ff-ad31-4a42-9946-a3285b56bafb\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-vgdj9" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.194164 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jxjhj\" (UniqueName: \"kubernetes.io/projected/c8aaf03c-1a0b-4346-99be-5367b88685bd-kube-api-access-jxjhj\") pod \"packageserver-d55dfcdfc-b829k\" (UID: \"c8aaf03c-1a0b-4346-99be-5367b88685bd\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-b829k" Sep 30 09:51:38 crc kubenswrapper[4730]: E0930 09:51:38.194424 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 09:51:38.694407401 +0000 UTC m=+143.027667394 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.195156 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4658f792-b8d4-4b22-92b7-46f6c8944eba-config-volume\") pod \"dns-default-r8pgh\" (UID: \"4658f792-b8d4-4b22-92b7-46f6c8944eba\") " pod="openshift-dns/dns-default-r8pgh" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.198188 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-df648" event={"ID":"2cf85f90-a707-4cbf-9cea-472b1109692d","Type":"ContainerStarted","Data":"50211809c53b56e94bea9f53cc50e89d26440b98fe4118fccc26fd81e08a0b50"} Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.198451 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/f1f19ecd-c701-4a85-85a4-05f74f6d6f60-mountpoint-dir\") pod \"csi-hostpathplugin-jj6x6\" (UID: \"f1f19ecd-c701-4a85-85a4-05f74f6d6f60\") " pod="hostpath-provisioner/csi-hostpathplugin-jj6x6" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.199051 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-df648" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.199172 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/47a443f7-7d61-447b-b119-29dcd51b1b18-config-volume\") pod \"collect-profiles-29320425-gwwdz\" (UID: \"47a443f7-7d61-447b-b119-29dcd51b1b18\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320425-gwwdz" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.199228 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/f1f19ecd-c701-4a85-85a4-05f74f6d6f60-registration-dir\") pod \"csi-hostpathplugin-jj6x6\" (UID: \"f1f19ecd-c701-4a85-85a4-05f74f6d6f60\") " pod="hostpath-provisioner/csi-hostpathplugin-jj6x6" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.199352 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/f1f19ecd-c701-4a85-85a4-05f74f6d6f60-plugins-dir\") pod \"csi-hostpathplugin-jj6x6\" (UID: \"f1f19ecd-c701-4a85-85a4-05f74f6d6f60\") " pod="hostpath-provisioner/csi-hostpathplugin-jj6x6" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.199636 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/f1f19ecd-c701-4a85-85a4-05f74f6d6f60-socket-dir\") pod \"csi-hostpathplugin-jj6x6\" (UID: \"f1f19ecd-c701-4a85-85a4-05f74f6d6f60\") " pod="hostpath-provisioner/csi-hostpathplugin-jj6x6" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.199798 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/175ee2dd-3d4e-4554-b13b-de02e542c8a3-signing-cabundle\") pod 
\"service-ca-9c57cc56f-ssdbb\" (UID: \"175ee2dd-3d4e-4554-b13b-de02e542c8a3\") " pod="openshift-service-ca/service-ca-9c57cc56f-ssdbb" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.199919 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/c8aaf03c-1a0b-4346-99be-5367b88685bd-tmpfs\") pod \"packageserver-d55dfcdfc-b829k\" (UID: \"c8aaf03c-1a0b-4346-99be-5367b88685bd\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-b829k" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.203703 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-v86bf" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.206123 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/f1f19ecd-c701-4a85-85a4-05f74f6d6f60-csi-data-dir\") pod \"csi-hostpathplugin-jj6x6\" (UID: \"f1f19ecd-c701-4a85-85a4-05f74f6d6f60\") " pod="hostpath-provisioner/csi-hostpathplugin-jj6x6" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.206924 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e15284e8-6f97-47e8-a08b-ad3d3a87de94-config\") pod \"service-ca-operator-777779d784-f4hwj\" (UID: \"e15284e8-6f97-47e8-a08b-ad3d3a87de94\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-f4hwj" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.207282 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/c7fc44ff-ad31-4a42-9946-a3285b56bafb-images\") pod \"machine-config-operator-74547568cd-vgdj9\" (UID: \"c7fc44ff-ad31-4a42-9946-a3285b56bafb\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-vgdj9" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.207480 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/c8aaf03c-1a0b-4346-99be-5367b88685bd-apiservice-cert\") pod \"packageserver-d55dfcdfc-b829k\" (UID: \"c8aaf03c-1a0b-4346-99be-5367b88685bd\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-b829k" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.208858 4730 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-df648 container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.11:8443/healthz\": dial tcp 10.217.0.11:8443: connect: connection refused" start-of-body= Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.208921 4730 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-df648" podUID="2cf85f90-a707-4cbf-9cea-472b1109692d" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.11:8443/healthz\": dial tcp 10.217.0.11:8443: connect: connection refused" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.209424 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/c7fc44ff-ad31-4a42-9946-a3285b56bafb-auth-proxy-config\") pod \"machine-config-operator-74547568cd-vgdj9\" (UID: \"c7fc44ff-ad31-4a42-9946-a3285b56bafb\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-vgdj9" Sep 30 09:51:38 crc 
kubenswrapper[4730]: I0930 09:51:38.211088 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/17099759-4076-40ae-b158-b00e2d947421-certs\") pod \"machine-config-server-cv6jk\" (UID: \"17099759-4076-40ae-b158-b00e2d947421\") " pod="openshift-machine-config-operator/machine-config-server-cv6jk" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.211716 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/cdca31a9-3f6d-402a-9a77-2e5156016199-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-v6vtc\" (UID: \"cdca31a9-3f6d-402a-9a77-2e5156016199\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-v6vtc" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.211897 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-n9qkg" event={"ID":"fc0f4b15-4b92-4799-b77f-8735c85b2203","Type":"ContainerStarted","Data":"5ce11416178fcc38d7495dfd4637824513994422961de435d4e50938b24c0f52"} Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.211928 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-n9qkg" event={"ID":"fc0f4b15-4b92-4799-b77f-8735c85b2203","Type":"ContainerStarted","Data":"1c0ae2d5cdc2e15b839c3f6ba2cf671815b78f4656aa03f9a1f39d4092583b70"} Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.211937 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-n9qkg" event={"ID":"fc0f4b15-4b92-4799-b77f-8735c85b2203","Type":"ContainerStarted","Data":"15d98372ae4be913b34ebc2cbbbbedea5bd472b769a1374da74e2481f9022186"} Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.214120 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/47a443f7-7d61-447b-b119-29dcd51b1b18-secret-volume\") pod \"collect-profiles-29320425-gwwdz\" (UID: \"47a443f7-7d61-447b-b119-29dcd51b1b18\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320425-gwwdz" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.214227 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/ba9ae093-0fce-4e5d-a1c9-f199cb2ba008-srv-cert\") pod \"olm-operator-6b444d44fb-cffv2\" (UID: \"ba9ae093-0fce-4e5d-a1c9-f199cb2ba008\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-cffv2" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.215084 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/ba9ae093-0fce-4e5d-a1c9-f199cb2ba008-profile-collector-cert\") pod \"olm-operator-6b444d44fb-cffv2\" (UID: \"ba9ae093-0fce-4e5d-a1c9-f199cb2ba008\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-cffv2" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.215897 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/175ee2dd-3d4e-4554-b13b-de02e542c8a3-signing-key\") pod \"service-ca-9c57cc56f-ssdbb\" (UID: \"175ee2dd-3d4e-4554-b13b-de02e542c8a3\") " pod="openshift-service-ca/service-ca-9c57cc56f-ssdbb" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.216648 4730 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-c9jlw\" (UniqueName: \"kubernetes.io/projected/800a0b26-7e9b-458b-95a9-703c7ace905f-kube-api-access-c9jlw\") pod \"dns-operator-744455d44c-cjsxd\" (UID: \"800a0b26-7e9b-458b-95a9-703c7ace905f\") " pod="openshift-dns-operator/dns-operator-744455d44c-cjsxd" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.217943 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/d3e12ca7-c78f-491d-bca2-3af653c9bf98-cert\") pod \"ingress-canary-2psv2\" (UID: \"d3e12ca7-c78f-491d-bca2-3af653c9bf98\") " pod="openshift-ingress-canary/ingress-canary-2psv2" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.219072 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/c8aaf03c-1a0b-4346-99be-5367b88685bd-webhook-cert\") pod \"packageserver-d55dfcdfc-b829k\" (UID: \"c8aaf03c-1a0b-4346-99be-5367b88685bd\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-b829k" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.219389 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-wqstr" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.219434 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/4658f792-b8d4-4b22-92b7-46f6c8944eba-metrics-tls\") pod \"dns-default-r8pgh\" (UID: \"4658f792-b8d4-4b22-92b7-46f6c8944eba\") " pod="openshift-dns/dns-default-r8pgh" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.220545 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e15284e8-6f97-47e8-a08b-ad3d3a87de94-serving-cert\") pod \"service-ca-operator-777779d784-f4hwj\" (UID: \"e15284e8-6f97-47e8-a08b-ad3d3a87de94\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-f4hwj" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.228218 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/eb3285d0-7f84-46d0-9c21-136e077b813a-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-27l2q\" (UID: \"eb3285d0-7f84-46d0-9c21-136e077b813a\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-27l2q" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.228416 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/17099759-4076-40ae-b158-b00e2d947421-node-bootstrap-token\") pod \"machine-config-server-cv6jk\" (UID: \"17099759-4076-40ae-b158-b00e2d947421\") " pod="openshift-machine-config-operator/machine-config-server-cv6jk" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.237326 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/c7fc44ff-ad31-4a42-9946-a3285b56bafb-proxy-tls\") pod \"machine-config-operator-74547568cd-vgdj9\" (UID: \"c7fc44ff-ad31-4a42-9946-a3285b56bafb\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-vgdj9" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.249311 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cx7zs\" (UniqueName: 
\"kubernetes.io/projected/b437c48c-f825-4a51-9076-acf5dcd25e36-kube-api-access-cx7zs\") pod \"marketplace-operator-79b997595-nt7nq\" (UID: \"b437c48c-f825-4a51-9076-acf5dcd25e36\") " pod="openshift-marketplace/marketplace-operator-79b997595-nt7nq" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.251128 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-9x2qr" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.259286 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-v6vtc" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.266441 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g9qrg\" (UniqueName: \"kubernetes.io/projected/16c0279b-a0e3-4400-be1a-c485c8ea0a34-kube-api-access-g9qrg\") pod \"router-default-5444994796-m2sph\" (UID: \"16c0279b-a0e3-4400-be1a-c485c8ea0a34\") " pod="openshift-ingress/router-default-5444994796-m2sph" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.272429 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-jkmzt" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.292421 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dlrsw\" (UniqueName: \"kubernetes.io/projected/27d3e752-f501-41c6-aed5-aa0e58103802-kube-api-access-dlrsw\") pod \"multus-admission-controller-857f4d67dd-xbf9b\" (UID: \"27d3e752-f501-41c6-aed5-aa0e58103802\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-xbf9b" Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.296050 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xdzzr\" (UID: \"3f90d05f-7820-4af0-8894-6d63dc672f33\") " pod="openshift-image-registry/image-registry-697d97f7c8-xdzzr" Sep 30 09:51:38 crc kubenswrapper[4730]: E0930 09:51:38.301307 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 09:51:38.801278515 +0000 UTC m=+143.134538498 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xdzzr" (UID: "3f90d05f-7820-4af0-8894-6d63dc672f33") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.308784 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b601ae3e-520b-43c1-8c4d-3946b0fb7cff-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-sfwd4\" (UID: \"b601ae3e-520b-43c1-8c4d-3946b0fb7cff\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-sfwd4"
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.316071 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xhwpp"]
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.316302 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-nt7nq"
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.322792 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/3f90d05f-7820-4af0-8894-6d63dc672f33-bound-sa-token\") pod \"image-registry-697d97f7c8-xdzzr\" (UID: \"3f90d05f-7820-4af0-8894-6d63dc672f33\") " pod="openshift-image-registry/image-registry-697d97f7c8-xdzzr"
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.354392 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hw7wq\" (UniqueName: \"kubernetes.io/projected/a3cec9a5-601c-4ec9-a147-88a53c19de69-kube-api-access-hw7wq\") pod \"machine-config-controller-84d6567774-9ckzv\" (UID: \"a3cec9a5-601c-4ec9-a147-88a53c19de69\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9ckzv"
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.372965 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cv44v\" (UniqueName: \"kubernetes.io/projected/555ceaf9-a5fa-41b9-ba91-92bd420e0c1b-kube-api-access-cv44v\") pod \"etcd-operator-b45778765-cxj9q\" (UID: \"555ceaf9-a5fa-41b9-ba91-92bd420e0c1b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-cxj9q"
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.387295 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jxjhj\" (UniqueName: \"kubernetes.io/projected/c8aaf03c-1a0b-4346-99be-5367b88685bd-kube-api-access-jxjhj\") pod \"packageserver-d55dfcdfc-b829k\" (UID: \"c8aaf03c-1a0b-4346-99be-5367b88685bd\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-b829k"
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.401050 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 09:51:38 crc kubenswrapper[4730]: E0930 09:51:38.401279 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 09:51:38.901251037 +0000 UTC m=+143.234511030 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.401811 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xdzzr\" (UID: \"3f90d05f-7820-4af0-8894-6d63dc672f33\") " pod="openshift-image-registry/image-registry-697d97f7c8-xdzzr"
Sep 30 09:51:38 crc kubenswrapper[4730]: E0930 09:51:38.402199 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 09:51:38.902189684 +0000 UTC m=+143.235449677 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xdzzr" (UID: "3f90d05f-7820-4af0-8894-6d63dc672f33") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.402559 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-md87h"]
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.406304 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x25cf\" (UniqueName: \"kubernetes.io/projected/17099759-4076-40ae-b158-b00e2d947421-kube-api-access-x25cf\") pod \"machine-config-server-cv6jk\" (UID: \"17099759-4076-40ae-b158-b00e2d947421\") " pod="openshift-machine-config-operator/machine-config-server-cv6jk"
Sep 30 09:51:38 crc kubenswrapper[4730]: W0930 09:51:38.440557 4730 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3c83ea01_beb2_4b6c_b67b_93cea3b56ca7.slice/crio-2de6eda61f3f00561f50c1d9d504a95ddad54a6f8cd6886437e5f18497c121f2 WatchSource:0}: Error finding container 2de6eda61f3f00561f50c1d9d504a95ddad54a6f8cd6886437e5f18497c121f2: Status 404 returned error can't find the container with id 2de6eda61f3f00561f50c1d9d504a95ddad54a6f8cd6886437e5f18497c121f2
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.441461 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hzx9h\" (UniqueName: \"kubernetes.io/projected/d3e12ca7-c78f-491d-bca2-3af653c9bf98-kube-api-access-hzx9h\") pod \"ingress-canary-2psv2\" (UID: \"d3e12ca7-c78f-491d-bca2-3af653c9bf98\") " pod="openshift-ingress-canary/ingress-canary-2psv2"
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.446987 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-cv6jk"
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.458645 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-cjsxd"
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.464181 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qbsdf\" (UniqueName: \"kubernetes.io/projected/4658f792-b8d4-4b22-92b7-46f6c8944eba-kube-api-access-qbsdf\") pod \"dns-default-r8pgh\" (UID: \"4658f792-b8d4-4b22-92b7-46f6c8944eba\") " pod="openshift-dns/dns-default-r8pgh"
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.464283 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-vgtf6"]
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.469378 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9v9vq\" (UniqueName: \"kubernetes.io/projected/c7fc44ff-ad31-4a42-9946-a3285b56bafb-kube-api-access-9v9vq\") pod \"machine-config-operator-74547568cd-vgdj9\" (UID: \"c7fc44ff-ad31-4a42-9946-a3285b56bafb\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-vgdj9"
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.471194 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-xbf9b"
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.473046 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-hlvdp"]
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.478210 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-tdl79"]
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.490194 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j45x8\" (UniqueName: \"kubernetes.io/projected/f1f19ecd-c701-4a85-85a4-05f74f6d6f60-kube-api-access-j45x8\") pod \"csi-hostpathplugin-jj6x6\" (UID: \"f1f19ecd-c701-4a85-85a4-05f74f6d6f60\") " pod="hostpath-provisioner/csi-hostpathplugin-jj6x6"
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.502022 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-cxj9q"
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.502636 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 09:51:38 crc kubenswrapper[4730]: E0930 09:51:38.503049 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 09:51:39.003027531 +0000 UTC m=+143.336287524 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
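Note: the repeated MountVolume/UnmountVolume failures above share one cause: the kubevirt.io.hostpath-provisioner CSI driver has not yet registered with this kubelet, because the csi-hostpathplugin-jj6x6 pod that provides it is itself still waiting for a sandbox at this point in the log. A minimal client-go sketch for listing the CSIDriver objects a cluster knows about follows; the kubeconfig path and variable names are illustrative, and cluster-level CSIDriver objects are distinct from node-level registration, which happens over the kubelet's plugin-registry socket.

package main

import (
	"context"
	"fmt"

	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	// Illustrative kubeconfig path; adjust for the environment.
	cfg, err := clientcmd.BuildConfigFromFlags("", "/root/.kube/config")
	if err != nil {
		panic(err)
	}
	cs, err := kubernetes.NewForConfig(cfg)
	if err != nil {
		panic(err)
	}
	// A driver absent from this list, or not yet registered on the node,
	// yields the "not found in the list of registered CSI drivers" error above.
	drivers, err := cs.StorageV1().CSIDrivers().List(context.TODO(), metav1.ListOptions{})
	if err != nil {
		panic(err)
	}
	for _, d := range drivers.Items {
		fmt.Println(d.Name) // expect kubevirt.io.hostpath-provisioner once it registers
	}
}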
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.509629 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8z9zr\" (UniqueName: \"kubernetes.io/projected/175ee2dd-3d4e-4554-b13b-de02e542c8a3-kube-api-access-8z9zr\") pod \"service-ca-9c57cc56f-ssdbb\" (UID: \"175ee2dd-3d4e-4554-b13b-de02e542c8a3\") " pod="openshift-service-ca/service-ca-9c57cc56f-ssdbb"
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.511866 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-sfwd4"
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.529516 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-m2sph"
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.543441 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-75wjk"]
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.583565 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-gf4lh"]
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.590960 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9ckzv"
Sep 30 09:51:38 crc kubenswrapper[4730]: E0930 09:51:38.606765 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 09:51:39.106736927 +0000 UTC m=+143.439996920 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xdzzr" (UID: "3f90d05f-7820-4af0-8894-6d63dc672f33") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.617468 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z88m2\" (UniqueName: \"kubernetes.io/projected/ba9ae093-0fce-4e5d-a1c9-f199cb2ba008-kube-api-access-z88m2\") pod \"olm-operator-6b444d44fb-cffv2\" (UID: \"ba9ae093-0fce-4e5d-a1c9-f199cb2ba008\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-cffv2"
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.620593 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r56fk\" (UniqueName: \"kubernetes.io/projected/47a443f7-7d61-447b-b119-29dcd51b1b18-kube-api-access-r56fk\") pod \"collect-profiles-29320425-gwwdz\" (UID: \"47a443f7-7d61-447b-b119-29dcd51b1b18\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320425-gwwdz"
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.621042 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-zzmfv"]
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.623770 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xdzzr\" (UID: \"3f90d05f-7820-4af0-8894-6d63dc672f33\") " pod="openshift-image-registry/image-registry-697d97f7c8-xdzzr"
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.625015 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fvcwd\" (UniqueName: \"kubernetes.io/projected/e15284e8-6f97-47e8-a08b-ad3d3a87de94-kube-api-access-fvcwd\") pod \"service-ca-operator-777779d784-f4hwj\" (UID: \"e15284e8-6f97-47e8-a08b-ad3d3a87de94\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-f4hwj"
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.629214 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bwgbw\" (UniqueName: \"kubernetes.io/projected/eb3285d0-7f84-46d0-9c21-136e077b813a-kube-api-access-bwgbw\") pod \"control-plane-machine-set-operator-78cbb6b69f-27l2q\" (UID: \"eb3285d0-7f84-46d0-9c21-136e077b813a\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-27l2q"
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.629428 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-b829k"
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.638680 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-27l2q"
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.649400 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-cffv2"
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.649790 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-2psv2"
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.658768 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-wqstr"]
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.660343 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-f4hwj"
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.665776 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-7sbbv"]
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.666151 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-brcnh"]
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.669428 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-ssdbb"
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.676205 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-vgdj9"
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.685458 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29320425-gwwdz"
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.702289 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-r8pgh"
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.725331 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 09:51:38 crc kubenswrapper[4730]: E0930 09:51:38.726456 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 09:51:39.226415977 +0000 UTC m=+143.559675990 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.733684 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-jj6x6"
Sep 30 09:51:38 crc kubenswrapper[4730]: W0930 09:51:38.745645 4730 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podaf4abbd6_d1b9_411d_9128_cc5b74a93eb5.slice/crio-1a8dddba176f1a731dfd66c046644b79fb8519f989a94890f4bace624533ad9d WatchSource:0}: Error finding container 1a8dddba176f1a731dfd66c046644b79fb8519f989a94890f4bace624533ad9d: Status 404 returned error can't find the container with id 1a8dddba176f1a731dfd66c046644b79fb8519f989a94890f4bace624533ad9d
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.826842 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xdzzr\" (UID: \"3f90d05f-7820-4af0-8894-6d63dc672f33\") " pod="openshift-image-registry/image-registry-697d97f7c8-xdzzr"
Sep 30 09:51:38 crc kubenswrapper[4730]: E0930 09:51:38.827268 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 09:51:39.327255524 +0000 UTC m=+143.660515517 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xdzzr" (UID: "3f90d05f-7820-4af0-8894-6d63dc672f33") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.844137 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-9x2qr"]
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.928702 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 09:51:38 crc kubenswrapper[4730]: E0930 09:51:38.928878 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 09:51:39.428849421 +0000 UTC m=+143.762109424 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.928974 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xdzzr\" (UID: \"3f90d05f-7820-4af0-8894-6d63dc672f33\") " pod="openshift-image-registry/image-registry-697d97f7c8-xdzzr"
Sep 30 09:51:38 crc kubenswrapper[4730]: E0930 09:51:38.929473 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 09:51:39.429464518 +0000 UTC m=+143.762724511 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xdzzr" (UID: "3f90d05f-7820-4af0-8894-6d63dc672f33") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 09:51:38 crc kubenswrapper[4730]: I0930 09:51:38.950454 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-v86bf"]
Sep 30 09:51:39 crc kubenswrapper[4730]: I0930 09:51:39.030790 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 09:51:39 crc kubenswrapper[4730]: E0930 09:51:39.031183 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 09:51:39.531140898 +0000 UTC m=+143.864400881 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 09:51:39 crc kubenswrapper[4730]: I0930 09:51:39.135607 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xdzzr\" (UID: \"3f90d05f-7820-4af0-8894-6d63dc672f33\") " pod="openshift-image-registry/image-registry-697d97f7c8-xdzzr"
Sep 30 09:51:39 crc kubenswrapper[4730]: E0930 09:51:39.135984 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 09:51:39.635971415 +0000 UTC m=+143.969231398 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xdzzr" (UID: "3f90d05f-7820-4af0-8894-6d63dc672f33") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 09:51:39 crc kubenswrapper[4730]: I0930 09:51:39.234649 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-wqstr" event={"ID":"435a6f87-b787-4c31-b41f-8013e1aaae11","Type":"ContainerStarted","Data":"5de123b827978a7d812bddf50b7168320c850fe9205d2ca37292a11ef1ae4f43"}
Sep 30 09:51:39 crc kubenswrapper[4730]: I0930 09:51:39.238074 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 09:51:39 crc kubenswrapper[4730]: E0930 09:51:39.238527 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 09:51:39.738511819 +0000 UTC m=+144.071771812 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 09:51:39 crc kubenswrapper[4730]: I0930 09:51:39.257937 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-md87h" event={"ID":"3c83ea01-beb2-4b6c-b67b-93cea3b56ca7","Type":"ContainerStarted","Data":"2de6eda61f3f00561f50c1d9d504a95ddad54a6f8cd6886437e5f18497c121f2"}
Sep 30 09:51:39 crc kubenswrapper[4730]: I0930 09:51:39.272917 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xhwpp" event={"ID":"94e2e017-59cc-4809-8244-a100190f35a9","Type":"ContainerStarted","Data":"47071d79004806f0af7c883470661ccfc44df9fe833073843d1e847f572d5488"}
Sep 30 09:51:39 crc kubenswrapper[4730]: I0930 09:51:39.286028 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-gf4lh" event={"ID":"af4abbd6-d1b9-411d-9128-cc5b74a93eb5","Type":"ContainerStarted","Data":"1a8dddba176f1a731dfd66c046644b79fb8519f989a94890f4bace624533ad9d"}
Sep 30 09:51:39 crc kubenswrapper[4730]: I0930 09:51:39.286683 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-v6vtc"]
Sep 30 09:51:39 crc kubenswrapper[4730]: I0930 09:51:39.293528 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-bkbql" event={"ID":"584babfe-a3c2-49da-97ac-c3a9fc90f102","Type":"ContainerStarted","Data":"613b28e2edc803cc856db0b54a7904f269a36db8204e9459eff3e6373ae45588"}
Sep 30 09:51:39 crc kubenswrapper[4730]: I0930 09:51:39.311212 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-brcnh" event={"ID":"1445a993-db8f-4cdb-a89c-9f45a3ee0b4d","Type":"ContainerStarted","Data":"4292a51511c19f5ac0d7e1c6a39fb786ed6e6c4148e71600918d91c70780e171"}
Sep 30 09:51:39 crc kubenswrapper[4730]: I0930 09:51:39.325069 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-tdl79" event={"ID":"503191a0-1fb3-4b1e-84f1-ac3d702f686e","Type":"ContainerStarted","Data":"f84818c6bb87fc194493c2f0c4904bb8e4c67414cc959e10222655a5188c82c6"}
Sep 30 09:51:39 crc kubenswrapper[4730]: I0930 09:51:39.356299 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xdzzr\" (UID: \"3f90d05f-7820-4af0-8894-6d63dc672f33\") " pod="openshift-image-registry/image-registry-697d97f7c8-xdzzr"
Sep 30 09:51:39 crc kubenswrapper[4730]: I0930 09:51:39.357548 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vxqtl" event={"ID":"dff327ac-2d87-47ce-ae92-f6e001b9d876","Type":"ContainerStarted","Data":"2a1940fae154276873cc803a538e5e08410a19bcd686c706dbf5a1414969b269"}
Sep 30 09:51:39 crc kubenswrapper[4730]: E0930 09:51:39.357954 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 09:51:39.857926732 +0000 UTC m=+144.191186725 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xdzzr" (UID: "3f90d05f-7820-4af0-8894-6d63dc672f33") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 09:51:39 crc kubenswrapper[4730]: I0930 09:51:39.422436 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-jkmzt"]
Sep 30 09:51:39 crc kubenswrapper[4730]: I0930 09:51:39.426424 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-vgtf6" event={"ID":"4a7a5ec3-bbea-4979-a00d-25961a1fd8f6","Type":"ContainerStarted","Data":"c8f620c8a4a1ee7c86327ce6f429d81d113e0ea287877363bb418e91e13d2b8f"}
Sep 30 09:51:39 crc kubenswrapper[4730]: I0930 09:51:39.441504 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-xbf9b"]
Sep 30 09:51:39 crc kubenswrapper[4730]: I0930 09:51:39.454732 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-df648" event={"ID":"2cf85f90-a707-4cbf-9cea-472b1109692d","Type":"ContainerStarted","Data":"87d1080c0946298811ddf6ccfb942c971dbcbe7efa93ff6436bac7476cd24528"}
Sep 30 09:51:39 crc kubenswrapper[4730]: I0930 09:51:39.455424 4730 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-df648 container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.11:8443/healthz\": dial tcp 10.217.0.11:8443: connect: connection refused" start-of-body=
Sep 30 09:51:39 crc kubenswrapper[4730]: I0930 09:51:39.455493 4730 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-df648" podUID="2cf85f90-a707-4cbf-9cea-472b1109692d" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.11:8443/healthz\": dial tcp 10.217.0.11:8443: connect: connection refused"
Sep 30 09:51:39 crc kubenswrapper[4730]: I0930 09:51:39.461691 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 09:51:39 crc kubenswrapper[4730]: E0930 09:51:39.463713 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 09:51:39.963685994 +0000 UTC m=+144.296946017 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 09:51:39 crc kubenswrapper[4730]: I0930 09:51:39.466050 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-r86xc" event={"ID":"789ee928-afa8-424d-8810-6a04b2a7d5d6","Type":"ContainerStarted","Data":"fe3c0f343810329252bef8304ce0a4001bdf46548458814a082744f7934f37c0"}
Sep 30 09:51:39 crc kubenswrapper[4730]: I0930 09:51:39.466093 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-r86xc" event={"ID":"789ee928-afa8-424d-8810-6a04b2a7d5d6","Type":"ContainerStarted","Data":"0e0c3e8457b068c20747bfe34d8cfce549d43fdcb92442ee3806c377a042fb87"}
Sep 30 09:51:39 crc kubenswrapper[4730]: I0930 09:51:39.475327 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-9x2qr" event={"ID":"21ce9670-1b8d-4896-a33b-b6dc5125dbdc","Type":"ContainerStarted","Data":"747ef7035ce9d82e8435131c64213b8b642dc97c75d743f766557d96866fc821"}
Sep 30 09:51:39 crc kubenswrapper[4730]: I0930 09:51:39.483625 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-75wjk" event={"ID":"3970c1fd-c1a9-40a4-a2b5-276df544f222","Type":"ContainerStarted","Data":"5b67f8026867d9414551347ddfb3fde9278ccd8d267fccea118e9b36769cca8c"}
Sep 30 09:51:39 crc kubenswrapper[4730]: I0930 09:51:39.486181 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-sfwd4"]
Sep 30 09:51:39 crc kubenswrapper[4730]: I0930 09:51:39.513193 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-hlvdp" event={"ID":"53bbfcae-905c-4cb8-883d-e027fe0939b5","Type":"ContainerStarted","Data":"0ede88d030b12105fb06f8e5af0a6a12a928d7523e4594d1c996ae8e40100092"}
Sep 30 09:51:39 crc kubenswrapper[4730]: I0930 09:51:39.525786 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-cxj9q"]
Sep 30 09:51:39 crc kubenswrapper[4730]: I0930 09:51:39.534667 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-nt7nq"]
Sep 30 09:51:39 crc kubenswrapper[4730]: I0930 09:51:39.544210 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-r8tph" event={"ID":"b495236a-11fa-48fb-9361-3c02fe062e4b","Type":"ContainerStarted","Data":"d7af9e9acf05f0dd17224908aecedffc3be82c81e7e22d6d4d307a1a7c3e7c7d"}
Sep 30 09:51:39 crc kubenswrapper[4730]: I0930 09:51:39.544250 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-r8tph" event={"ID":"b495236a-11fa-48fb-9361-3c02fe062e4b","Type":"ContainerStarted","Data":"2864527e5f6d07b051e16a793960fc187558d7e9c691c4dfe2a9307641ab9a0d"}
Sep 30 09:51:39 crc kubenswrapper[4730]: I0930 09:51:39.549558 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-r8tph"
Sep 30 09:51:39 crc kubenswrapper[4730]: I0930 09:51:39.553920 4730 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-r8tph container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.16:6443/healthz\": dial tcp 10.217.0.16:6443: connect: connection refused" start-of-body=
Sep 30 09:51:39 crc kubenswrapper[4730]: I0930 09:51:39.554039 4730 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-r8tph" podUID="b495236a-11fa-48fb-9361-3c02fe062e4b" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.16:6443/healthz\": dial tcp 10.217.0.16:6443: connect: connection refused"
Sep 30 09:51:39 crc kubenswrapper[4730]: I0930 09:51:39.563830 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xdzzr\" (UID: \"3f90d05f-7820-4af0-8894-6d63dc672f33\") " pod="openshift-image-registry/image-registry-697d97f7c8-xdzzr"
Sep 30 09:51:39 crc kubenswrapper[4730]: E0930 09:51:39.564248 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 09:51:40.064229063 +0000 UTC m=+144.397489056 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xdzzr" (UID: "3f90d05f-7820-4af0-8894-6d63dc672f33") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 09:51:39 crc kubenswrapper[4730]: W0930 09:51:39.585749 4730 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb601ae3e_520b_43c1_8c4d_3946b0fb7cff.slice/crio-294935712d0c627490e4e0d0f523dc0a54e73182e2f20c1841f479c58350ec92 WatchSource:0}: Error finding container 294935712d0c627490e4e0d0f523dc0a54e73182e2f20c1841f479c58350ec92: Status 404 returned error can't find the container with id 294935712d0c627490e4e0d0f523dc0a54e73182e2f20c1841f479c58350ec92
Sep 30 09:51:39 crc kubenswrapper[4730]: I0930 09:51:39.621281 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-zzmfv" event={"ID":"e0967d6a-234e-4f7b-b4ec-073e1822fec1","Type":"ContainerStarted","Data":"cdb0023bbb383401a4c75f8296728ca89bd066c82490e6973feb2e852cc70426"}
Sep 30 09:51:39 crc kubenswrapper[4730]: I0930 09:51:39.626711 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-cjsxd"]
Sep 30 09:51:39 crc kubenswrapper[4730]: I0930 09:51:39.629378 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-7sbbv" event={"ID":"9285876c-50d7-4dc8-a57d-d7a6db5cf3ce","Type":"ContainerStarted","Data":"fb53cf5514f5061bf5cf8880ccac3eee7ed45a9265d030df61843f85b1ffd516"}
Sep 30 09:51:39 crc kubenswrapper[4730]: I0930 09:51:39.652016 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-m2sph" event={"ID":"16c0279b-a0e3-4400-be1a-c485c8ea0a34","Type":"ContainerStarted","Data":"6a63968bfb18d8c3cf7965a4195fffb950e08b4c912ac8ce56dd1f89d629f1e8"}
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-m2sph" event={"ID":"16c0279b-a0e3-4400-be1a-c485c8ea0a34","Type":"ContainerStarted","Data":"6a63968bfb18d8c3cf7965a4195fffb950e08b4c912ac8ce56dd1f89d629f1e8"} Sep 30 09:51:39 crc kubenswrapper[4730]: I0930 09:51:39.660278 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-cljtg" Sep 30 09:51:39 crc kubenswrapper[4730]: I0930 09:51:39.669813 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 09:51:39 crc kubenswrapper[4730]: E0930 09:51:39.670904 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 09:51:40.17084184 +0000 UTC m=+144.504101833 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 09:51:39 crc kubenswrapper[4730]: I0930 09:51:39.683036 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-mpfz6" podStartSLOduration=122.682995617 podStartE2EDuration="2m2.682995617s" podCreationTimestamp="2025-09-30 09:49:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 09:51:39.676034424 +0000 UTC m=+144.009294427" watchObservedRunningTime="2025-09-30 09:51:39.682995617 +0000 UTC m=+144.016255610" Sep 30 09:51:39 crc kubenswrapper[4730]: I0930 09:51:39.757516 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-df648" podStartSLOduration=122.757494713 podStartE2EDuration="2m2.757494713s" podCreationTimestamp="2025-09-30 09:49:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 09:51:39.75699762 +0000 UTC m=+144.090257623" watchObservedRunningTime="2025-09-30 09:51:39.757494713 +0000 UTC m=+144.090754706" Sep 30 09:51:39 crc kubenswrapper[4730]: I0930 09:51:39.773364 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xdzzr\" (UID: \"3f90d05f-7820-4af0-8894-6d63dc672f33\") " pod="openshift-image-registry/image-registry-697d97f7c8-xdzzr" Sep 30 09:51:39 crc kubenswrapper[4730]: E0930 09:51:39.773820 4730 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 09:51:40.273798606 +0000 UTC m=+144.607058619 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xdzzr" (UID: "3f90d05f-7820-4af0-8894-6d63dc672f33") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 09:51:39 crc kubenswrapper[4730]: I0930 09:51:39.860913 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-n9qkg" podStartSLOduration=122.860891141 podStartE2EDuration="2m2.860891141s" podCreationTimestamp="2025-09-30 09:49:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 09:51:39.850582726 +0000 UTC m=+144.183842719" watchObservedRunningTime="2025-09-30 09:51:39.860891141 +0000 UTC m=+144.194151134" Sep 30 09:51:39 crc kubenswrapper[4730]: I0930 09:51:39.874641 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 09:51:39 crc kubenswrapper[4730]: E0930 09:51:39.875039 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 09:51:40.375022523 +0000 UTC m=+144.708282516 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 09:51:39 crc kubenswrapper[4730]: I0930 09:51:39.887779 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-bkbql" podStartSLOduration=122.887762496 podStartE2EDuration="2m2.887762496s" podCreationTimestamp="2025-09-30 09:49:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 09:51:39.885292828 +0000 UTC m=+144.218552831" watchObservedRunningTime="2025-09-30 09:51:39.887762496 +0000 UTC m=+144.221022499" Sep 30 09:51:39 crc kubenswrapper[4730]: I0930 09:51:39.955883 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-2psv2"] Sep 30 09:51:39 crc kubenswrapper[4730]: I0930 09:51:39.988123 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xdzzr\" (UID: \"3f90d05f-7820-4af0-8894-6d63dc672f33\") " pod="openshift-image-registry/image-registry-697d97f7c8-xdzzr" Sep 30 09:51:39 crc kubenswrapper[4730]: E0930 09:51:39.988642 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 09:51:40.488628094 +0000 UTC m=+144.821888087 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xdzzr" (UID: "3f90d05f-7820-4af0-8894-6d63dc672f33") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 09:51:40 crc kubenswrapper[4730]: I0930 09:51:40.018541 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-cljtg" podStartSLOduration=122.018524503 podStartE2EDuration="2m2.018524503s" podCreationTimestamp="2025-09-30 09:49:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 09:51:40.004955317 +0000 UTC m=+144.338215310" watchObservedRunningTime="2025-09-30 09:51:40.018524503 +0000 UTC m=+144.351784496" Sep 30 09:51:40 crc kubenswrapper[4730]: I0930 09:51:40.088853 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 09:51:40 crc kubenswrapper[4730]: E0930 09:51:40.089175 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 09:51:40.589160672 +0000 UTC m=+144.922420665 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 09:51:40 crc kubenswrapper[4730]: I0930 09:51:40.154932 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-9ckzv"] Sep 30 09:51:40 crc kubenswrapper[4730]: I0930 09:51:40.192089 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xdzzr\" (UID: \"3f90d05f-7820-4af0-8894-6d63dc672f33\") " pod="openshift-image-registry/image-registry-697d97f7c8-xdzzr" Sep 30 09:51:40 crc kubenswrapper[4730]: E0930 09:51:40.192714 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 09:51:40.692698424 +0000 UTC m=+145.025958417 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xdzzr" (UID: "3f90d05f-7820-4af0-8894-6d63dc672f33") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 09:51:40 crc kubenswrapper[4730]: I0930 09:51:40.204747 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-f4hwj"] Sep 30 09:51:40 crc kubenswrapper[4730]: I0930 09:51:40.212834 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-27l2q"] Sep 30 09:51:40 crc kubenswrapper[4730]: I0930 09:51:40.216406 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vxqtl" podStartSLOduration=123.216388481 podStartE2EDuration="2m3.216388481s" podCreationTimestamp="2025-09-30 09:49:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 09:51:40.205470428 +0000 UTC m=+144.538730431" watchObservedRunningTime="2025-09-30 09:51:40.216388481 +0000 UTC m=+144.549648494" Sep 30 09:51:40 crc kubenswrapper[4730]: I0930 09:51:40.259209 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-b829k"] Sep 30 09:51:40 crc kubenswrapper[4730]: I0930 09:51:40.265029 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-r8tph" podStartSLOduration=123.26500072 podStartE2EDuration="2m3.26500072s" podCreationTimestamp="2025-09-30 09:49:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 09:51:40.245294142 +0000 UTC m=+144.578554135" watchObservedRunningTime="2025-09-30 09:51:40.26500072 +0000 UTC m=+144.598260713" Sep 30 09:51:40 crc kubenswrapper[4730]: I0930 09:51:40.295187 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 09:51:40 crc kubenswrapper[4730]: E0930 09:51:40.295777 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 09:51:40.795762312 +0000 UTC m=+145.129022305 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 09:51:40 crc kubenswrapper[4730]: I0930 09:51:40.398339 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xdzzr\" (UID: \"3f90d05f-7820-4af0-8894-6d63dc672f33\") " pod="openshift-image-registry/image-registry-697d97f7c8-xdzzr" Sep 30 09:51:40 crc kubenswrapper[4730]: E0930 09:51:40.398828 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 09:51:40.89881155 +0000 UTC m=+145.232071543 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xdzzr" (UID: "3f90d05f-7820-4af0-8894-6d63dc672f33") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 09:51:40 crc kubenswrapper[4730]: I0930 09:51:40.416055 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-cffv2"] Sep 30 09:51:40 crc kubenswrapper[4730]: I0930 09:51:40.416103 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-ssdbb"] Sep 30 09:51:40 crc kubenswrapper[4730]: I0930 09:51:40.498961 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 09:51:40 crc kubenswrapper[4730]: E0930 09:51:40.499396 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 09:51:40.99937875 +0000 UTC m=+145.332638753 (durationBeforeRetry 500ms). 
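Note: each E0930 nestedpendingoperations.go:348 line above is the kubelet's volume manager refusing to re-run a failed mount or unmount before a deadline; in this log the embargo is a constant 500ms, while the real implementation backs off for growing intervals up to a cap. A much-reduced sketch of that per-operation gating; the type and names are illustrative, not kubelet's actual ones.

package main

import (
	"fmt"
	"time"
)

// retryGate models, in very reduced form, the bookkeeping behind the
// "No retries permitted until <t> (durationBeforeRetry 500ms)" lines:
// a failed operation is embargoed until its deadline passes.
type retryGate struct {
	notBefore map[string]time.Time
	delay     time.Duration
}

func (g *retryGate) run(key string, op func() error) error {
	if t, ok := g.notBefore[key]; ok && time.Now().Before(t) {
		return fmt.Errorf("no retries permitted until %s", t.Format(time.RFC3339Nano))
	}
	if err := op(); err != nil {
		// Fixed 500ms here; kubelet's real backoff grows between failures.
		g.notBefore[key] = time.Now().Add(g.delay)
		return err
	}
	delete(g.notBefore, key)
	return nil
}

func main() {
	g := &retryGate{notBefore: map[string]time.Time{}, delay: 500 * time.Millisecond}
	mount := func() error { return fmt.Errorf("driver not registered") }
	for i := 0; i < 3; i++ {
		fmt.Println(g.run("pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8", mount))
		time.Sleep(300 * time.Millisecond)
	}
}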
Sep 30 09:51:40 crc kubenswrapper[4730]: I0930 09:51:40.551569 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29320425-gwwdz"]
Sep 30 09:51:40 crc kubenswrapper[4730]: I0930 09:51:40.602278 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xdzzr\" (UID: \"3f90d05f-7820-4af0-8894-6d63dc672f33\") " pod="openshift-image-registry/image-registry-697d97f7c8-xdzzr"
Sep 30 09:51:40 crc kubenswrapper[4730]: E0930 09:51:40.602759 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 09:51:41.102743977 +0000 UTC m=+145.436004050 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xdzzr" (UID: "3f90d05f-7820-4af0-8894-6d63dc672f33") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 09:51:40 crc kubenswrapper[4730]: I0930 09:51:40.668590 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-r8pgh"]
Sep 30 09:51:40 crc kubenswrapper[4730]: I0930 09:51:40.679741 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-vgdj9"]
Sep 30 09:51:40 crc kubenswrapper[4730]: I0930 09:51:40.685754 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-jj6x6"]
Sep 30 09:51:40 crc kubenswrapper[4730]: I0930 09:51:40.696047 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-md87h" event={"ID":"3c83ea01-beb2-4b6c-b67b-93cea3b56ca7","Type":"ContainerStarted","Data":"82fd6408e8d281795129803e3ea26c13aef10ee065b8cdef3cba62d99ca559f7"}
Sep 30 09:51:40 crc kubenswrapper[4730]: I0930 09:51:40.702857 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 09:51:40 crc kubenswrapper[4730]: E0930 09:51:40.703175 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 09:51:41.203160561 +0000 UTC m=+145.536420554 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 09:51:40 crc kubenswrapper[4730]: I0930 09:51:40.703934 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-75wjk" event={"ID":"3970c1fd-c1a9-40a4-a2b5-276df544f222","Type":"ContainerStarted","Data":"6fd5bfc4611b4d5ce043248cc06c1d394b1fd36764ad6f79d4e2a8bcedd764fd"}
Sep 30 09:51:40 crc kubenswrapper[4730]: I0930 09:51:40.717803 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-2psv2" event={"ID":"d3e12ca7-c78f-491d-bca2-3af653c9bf98","Type":"ContainerStarted","Data":"5a5b68230c62307b98edc4f1389d44b2b364838be5b0eb47d8949f87a6c5e2c7"}
Sep 30 09:51:40 crc kubenswrapper[4730]: I0930 09:51:40.723997 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-v6vtc" event={"ID":"cdca31a9-3f6d-402a-9a77-2e5156016199","Type":"ContainerStarted","Data":"40227b6206a814382841786440bbe352331b78c970672f55f3315e0b300caf3f"}
Sep 30 09:51:40 crc kubenswrapper[4730]: W0930 09:51:40.727284 4730 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod47a443f7_7d61_447b_b119_29dcd51b1b18.slice/crio-2bc82827de0365291daa0923dad93d253caa2b928abce592e08a25d7800878c7 WatchSource:0}: Error finding container 2bc82827de0365291daa0923dad93d253caa2b928abce592e08a25d7800878c7: Status 404 returned error can't find the container with id 2bc82827de0365291daa0923dad93d253caa2b928abce592e08a25d7800878c7
Sep 30 09:51:40 crc kubenswrapper[4730]: I0930 09:51:40.730910 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9ckzv" event={"ID":"a3cec9a5-601c-4ec9-a147-88a53c19de69","Type":"ContainerStarted","Data":"5a3ba3b1904d99d0b73a51e7128bb3560a51e392bcb2f970a6b19018c80fd55a"}
Sep 30 09:51:40 crc kubenswrapper[4730]: I0930 09:51:40.733092 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-nt7nq" event={"ID":"b437c48c-f825-4a51-9076-acf5dcd25e36","Type":"ContainerStarted","Data":"1327e7cc767626419db36e899ee267dd820eac7c4c930014bc47e025d0fd8b73"}
Sep 30 09:51:40 crc kubenswrapper[4730]: I0930 09:51:40.745150 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-r86xc" event={"ID":"789ee928-afa8-424d-8810-6a04b2a7d5d6","Type":"ContainerStarted","Data":"5ac68592a2382e3974c5611ed937239201521d92a8954c4c54112d7212be213e"}
Sep 30 09:51:40 crc kubenswrapper[4730]: I0930 09:51:40.813277 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xdzzr\" (UID: \"3f90d05f-7820-4af0-8894-6d63dc672f33\") " pod="openshift-image-registry/image-registry-697d97f7c8-xdzzr"
Sep 30 09:51:40 crc kubenswrapper[4730]: E0930 09:51:40.814787 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 09:51:41.314768007 +0000 UTC m=+145.648028080 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xdzzr" (UID: "3f90d05f-7820-4af0-8894-6d63dc672f33") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 09:51:40 crc kubenswrapper[4730]: I0930 09:51:40.847830 4730 generic.go:334] "Generic (PLEG): container finished" podID="503191a0-1fb3-4b1e-84f1-ac3d702f686e" containerID="27acc00da28edd828fc1e906a45aeffdcb402798b794629cdc3a6dfe3ed3ccc4" exitCode=0
Sep 30 09:51:40 crc kubenswrapper[4730]: I0930 09:51:40.849535 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-tdl79" event={"ID":"503191a0-1fb3-4b1e-84f1-ac3d702f686e","Type":"ContainerDied","Data":"27acc00da28edd828fc1e906a45aeffdcb402798b794629cdc3a6dfe3ed3ccc4"}
Sep 30 09:51:40 crc kubenswrapper[4730]: I0930 09:51:40.908262 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-vgtf6" event={"ID":"4a7a5ec3-bbea-4979-a00d-25961a1fd8f6","Type":"ContainerStarted","Data":"44997fd158c66cbf901cb48cf3160b586d42157eb20541ad02188f445c7ce30e"}
Sep 30 09:51:40 crc kubenswrapper[4730]: I0930 09:51:40.925186 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 09:51:40 crc kubenswrapper[4730]: E0930 09:51:40.927016 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 09:51:41.426992029 +0000 UTC m=+145.760252022 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 09:51:40 crc kubenswrapper[4730]: I0930 09:51:40.979017 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-sfwd4" event={"ID":"b601ae3e-520b-43c1-8c4d-3946b0fb7cff","Type":"ContainerStarted","Data":"294935712d0c627490e4e0d0f523dc0a54e73182e2f20c1841f479c58350ec92"}
Sep 30 09:51:41 crc kubenswrapper[4730]: I0930 09:51:41.027487 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xdzzr\" (UID: \"3f90d05f-7820-4af0-8894-6d63dc672f33\") " pod="openshift-image-registry/image-registry-697d97f7c8-xdzzr"
Sep 30 09:51:41 crc kubenswrapper[4730]: E0930 09:51:41.030327 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 09:51:41.530312265 +0000 UTC m=+145.863572258 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xdzzr" (UID: "3f90d05f-7820-4af0-8894-6d63dc672f33") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 09:51:41 crc kubenswrapper[4730]: I0930 09:51:41.046222 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-md87h" podStartSLOduration=124.046201406 podStartE2EDuration="2m4.046201406s" podCreationTimestamp="2025-09-30 09:49:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 09:51:40.978773146 +0000 UTC m=+145.312033129" watchObservedRunningTime="2025-09-30 09:51:41.046201406 +0000 UTC m=+145.379461399"
Sep 30 09:51:41 crc kubenswrapper[4730]: I0930 09:51:41.069449 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-cxj9q" event={"ID":"555ceaf9-a5fa-41b9-ba91-92bd420e0c1b","Type":"ContainerStarted","Data":"ea8536ef532f521fce8c80c7a401626fc56319189efc9abff82027cb0bac4cc1"}
Sep 30 09:51:41 crc kubenswrapper[4730]: I0930 09:51:41.073985 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-jkmzt" event={"ID":"778ecda6-96a8-46f1-89e5-79c372bfc123","Type":"ContainerStarted","Data":"a63d9f4ad460c37178583aba1d04e8c1ee66567dcd6594a7541ac44573979892"}
Sep 30 09:51:41 crc kubenswrapper[4730]: I0930 09:51:41.128584 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\"
(UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 09:51:41 crc kubenswrapper[4730]: E0930 09:51:41.128777 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 09:51:41.628740635 +0000 UTC m=+145.962000638 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 09:51:41 crc kubenswrapper[4730]: I0930 09:51:41.133032 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xdzzr\" (UID: \"3f90d05f-7820-4af0-8894-6d63dc672f33\") " pod="openshift-image-registry/image-registry-697d97f7c8-xdzzr" Sep 30 09:51:41 crc kubenswrapper[4730]: E0930 09:51:41.133456 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 09:51:41.633444576 +0000 UTC m=+145.966704569 (durationBeforeRetry 500ms). 
Sep 30 09:51:41 crc kubenswrapper[4730]: I0930 09:51:41.129905 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-b829k" event={"ID":"c8aaf03c-1a0b-4346-99be-5367b88685bd","Type":"ContainerStarted","Data":"668e9ee6275370f9e3863ee3e97147581393d9b3ce998997a7e842a69a6a53c3"}
Sep 30 09:51:41 crc kubenswrapper[4730]: I0930 09:51:41.181956 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-m2sph" event={"ID":"16c0279b-a0e3-4400-be1a-c485c8ea0a34","Type":"ContainerStarted","Data":"4ef757579ecbee21c3ec716d0686d58644aefbac8bbd5de2220adc49f6103e19"}
Sep 30 09:51:41 crc kubenswrapper[4730]: I0930 09:51:41.192888 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-vgtf6" podStartSLOduration=124.192863273 podStartE2EDuration="2m4.192863273s" podCreationTimestamp="2025-09-30 09:49:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 09:51:41.191477965 +0000 UTC m=+145.524737958" watchObservedRunningTime="2025-09-30 09:51:41.192863273 +0000 UTC m=+145.526123266"
Sep 30 09:51:41 crc kubenswrapper[4730]: I0930 09:51:41.193500 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-r86xc" podStartSLOduration=124.193492271 podStartE2EDuration="2m4.193492271s" podCreationTimestamp="2025-09-30 09:49:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 09:51:41.152650838 +0000 UTC m=+145.485910831" watchObservedRunningTime="2025-09-30 09:51:41.193492271 +0000 UTC m=+145.526752264"
Sep 30 09:51:41 crc kubenswrapper[4730]: I0930 09:51:41.200177 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-v86bf" event={"ID":"9297e80b-31ef-46af-98dc-8770fb66a889","Type":"ContainerStarted","Data":"45ac817fe6b369329ac6aa24299d9386894a9e362fcf979844152388b6a38a90"}
Sep 30 09:51:41 crc kubenswrapper[4730]: I0930 09:51:41.244595 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 09:51:41 crc kubenswrapper[4730]: E0930 09:51:41.245130 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 09:51:41.745112573 +0000 UTC m=+146.078372566 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 09:51:41 crc kubenswrapper[4730]: I0930 09:51:41.274884 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-zzmfv" event={"ID":"e0967d6a-234e-4f7b-b4ec-073e1822fec1","Type":"ContainerStarted","Data":"5a1dc14b9c9d039913ab1a3fa8ce2e0fb8983c19fbcb96f78e91a49c921f77ea"}
Sep 30 09:51:41 crc kubenswrapper[4730]: I0930 09:51:41.291475 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-27l2q" event={"ID":"eb3285d0-7f84-46d0-9c21-136e077b813a","Type":"ContainerStarted","Data":"14dec41ec2d54c89d1475f60d62803304f9c283e0f7f323415a81fd734ae4845"}
Sep 30 09:51:41 crc kubenswrapper[4730]: I0930 09:51:41.292685 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-jkmzt" podStartSLOduration=124.292669761 podStartE2EDuration="2m4.292669761s" podCreationTimestamp="2025-09-30 09:49:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 09:51:41.271557986 +0000 UTC m=+145.604817989" watchObservedRunningTime="2025-09-30 09:51:41.292669761 +0000 UTC m=+145.625929754"
Sep 30 09:51:41 crc kubenswrapper[4730]: I0930 09:51:41.329796 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-m2sph" podStartSLOduration=124.329774961 podStartE2EDuration="2m4.329774961s" podCreationTimestamp="2025-09-30 09:49:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 09:51:41.32939068 +0000 UTC m=+145.662650673" watchObservedRunningTime="2025-09-30 09:51:41.329774961 +0000 UTC m=+145.663034954"
Sep 30 09:51:41 crc kubenswrapper[4730]: I0930 09:51:41.346460 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-hlvdp" event={"ID":"53bbfcae-905c-4cb8-883d-e027fe0939b5","Type":"ContainerStarted","Data":"e625a8c02ebb384c3f2c11c1b8754fb9f46726328b0f6c884abc4852dddeb0b8"}
Sep 30 09:51:41 crc kubenswrapper[4730]: I0930 09:51:41.347603 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-hlvdp"
Sep 30 09:51:41 crc kubenswrapper[4730]: I0930 09:51:41.348971 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xdzzr\" (UID: \"3f90d05f-7820-4af0-8894-6d63dc672f33\") " pod="openshift-image-registry/image-registry-697d97f7c8-xdzzr"
Sep 30 09:51:41 crc kubenswrapper[4730]: E0930 09:51:41.350111 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 09:51:41.850082854 +0000 UTC m=+146.183343047 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xdzzr" (UID: "3f90d05f-7820-4af0-8894-6d63dc672f33") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 09:51:41 crc kubenswrapper[4730]: I0930 09:51:41.357797 4730 patch_prober.go:28] interesting pod/console-operator-58897d9998-hlvdp container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.23:8443/readyz\": dial tcp 10.217.0.23:8443: connect: connection refused" start-of-body=
Sep 30 09:51:41 crc kubenswrapper[4730]: I0930 09:51:41.357880 4730 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-hlvdp" podUID="53bbfcae-905c-4cb8-883d-e027fe0939b5" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.23:8443/readyz\": dial tcp 10.217.0.23:8443: connect: connection refused"
Sep 30 09:51:41 crc kubenswrapper[4730]: I0930 09:51:41.361625 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xhwpp" event={"ID":"94e2e017-59cc-4809-8244-a100190f35a9","Type":"ContainerStarted","Data":"ece61d86762bf8c425cbfb7335f2e4101dcf7d7cb1286adcad3763e108f9e9ab"}
Sep 30 09:51:41 crc kubenswrapper[4730]: I0930 09:51:41.377721 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-zzmfv" podStartSLOduration=124.37769065 podStartE2EDuration="2m4.37769065s" podCreationTimestamp="2025-09-30 09:49:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 09:51:41.374892202 +0000 UTC m=+145.708152205" watchObservedRunningTime="2025-09-30 09:51:41.37769065 +0000 UTC m=+145.710950663"
Sep 30 09:51:41 crc kubenswrapper[4730]: I0930 09:51:41.389024 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-xbf9b" event={"ID":"27d3e752-f501-41c6-aed5-aa0e58103802","Type":"ContainerStarted","Data":"f7de979dbb1dc5622126900c16b6bbfafb1c4e41d47f45a2acd27330db4bd62a"}
Sep 30 09:51:41 crc kubenswrapper[4730]: I0930 09:51:41.420400 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-hlvdp" podStartSLOduration=124.420378344 podStartE2EDuration="2m4.420378344s" podCreationTimestamp="2025-09-30 09:49:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 09:51:41.419448608 +0000 UTC m=+145.752708611" watchObservedRunningTime="2025-09-30 09:51:41.420378344 +0000 UTC m=+145.753638367"
Sep 30 09:51:41 crc kubenswrapper[4730]: I0930 09:51:41.420448 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-cv6jk" event={"ID":"17099759-4076-40ae-b158-b00e2d947421","Type":"ContainerStarted","Data":"de050b8e44a9d67c3c7d3bd60c26071aadbc1601deec398275937691af49f02a"}
Sep 30 09:51:41 crc kubenswrapper[4730]: I0930 09:51:41.420480 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-cv6jk" event={"ID":"17099759-4076-40ae-b158-b00e2d947421","Type":"ContainerStarted","Data":"6920bb0ae0d3c1b019991099e1dd3e0902cdb94380837bcf6130a0c8553c31a5"}
Sep 30 09:51:41 crc kubenswrapper[4730]: I0930 09:51:41.438114 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-7sbbv" event={"ID":"9285876c-50d7-4dc8-a57d-d7a6db5cf3ce","Type":"ContainerStarted","Data":"e9736e51f6bfb70719f3d0981a59e48eb248fe719a54df0a37a183322a26f734"}
Sep 30 09:51:41 crc kubenswrapper[4730]: I0930 09:51:41.438547 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-7sbbv"
Sep 30 09:51:41 crc kubenswrapper[4730]: I0930 09:51:41.460985 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 09:51:41 crc kubenswrapper[4730]: E0930 09:51:41.463597 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 09:51:41.963560282 +0000 UTC m=+146.296820275 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 09:51:41 crc kubenswrapper[4730]: I0930 09:51:41.471553 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xhwpp" podStartSLOduration=124.471535642 podStartE2EDuration="2m4.471535642s" podCreationTimestamp="2025-09-30 09:49:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 09:51:41.469035723 +0000 UTC m=+145.802295716" watchObservedRunningTime="2025-09-30 09:51:41.471535642 +0000 UTC m=+145.804795635"
Sep 30 09:51:41 crc kubenswrapper[4730]: I0930 09:51:41.490745 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-7sbbv"
Sep 30 09:51:41 crc kubenswrapper[4730]: I0930 09:51:41.529145 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-cjsxd" event={"ID":"800a0b26-7e9b-458b-95a9-703c7ace905f","Type":"ContainerStarted","Data":"2fc504942ce146a8ca0f57797400bc63397cbd5d89b1ba018e45205f41734b44"}
Sep 30 09:51:41 crc kubenswrapper[4730]: I0930 09:51:41.532418 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-m2sph"
Sep 30 09:51:41 crc kubenswrapper[4730]: I0930 09:51:41.547807 4730 patch_prober.go:28] interesting pod/router-default-5444994796-m2sph container/router namespace/openshift-ingress: Startup probe status=failure output="Get \"http://localhost:1936/healthz/ready\": dial tcp [::1]:1936: connect: connection refused" start-of-body=
Sep 30 09:51:41 crc kubenswrapper[4730]: I0930 09:51:41.547872 4730 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-m2sph" podUID="16c0279b-a0e3-4400-be1a-c485c8ea0a34" containerName="router" probeResult="failure" output="Get \"http://localhost:1936/healthz/ready\": dial tcp [::1]:1936: connect: connection refused"
Sep 30 09:51:41 crc kubenswrapper[4730]: I0930 09:51:41.547547 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-7sbbv" podStartSLOduration=124.54751869 podStartE2EDuration="2m4.54751869s" podCreationTimestamp="2025-09-30 09:49:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 09:51:41.510967746 +0000 UTC m=+145.844227739" watchObservedRunningTime="2025-09-30 09:51:41.54751869 +0000 UTC m=+145.880778683"
Sep 30 09:51:41 crc kubenswrapper[4730]: I0930 09:51:41.601511 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xdzzr\" (UID: \"3f90d05f-7820-4af0-8894-6d63dc672f33\") " pod="openshift-image-registry/image-registry-697d97f7c8-xdzzr"
Sep 30 09:51:41 crc kubenswrapper[4730]: E0930 09:51:41.603031 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 09:51:42.103018459 +0000 UTC m=+146.436278452 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xdzzr" (UID: "3f90d05f-7820-4af0-8894-6d63dc672f33") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 09:51:41 crc kubenswrapper[4730]: I0930 09:51:41.607777 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-f4hwj" event={"ID":"e15284e8-6f97-47e8-a08b-ad3d3a87de94","Type":"ContainerStarted","Data":"bcc72a1121bf2c4767461942564eaf1f097dbc51fa958cee27bf0ad5892a5190"}
Sep 30 09:51:41 crc kubenswrapper[4730]: I0930 09:51:41.616437 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-cffv2" event={"ID":"ba9ae093-0fce-4e5d-a1c9-f199cb2ba008","Type":"ContainerStarted","Data":"c12e29ecf3439658b2c980ffb59694a6f18d4affb96808cb959be8008eef3a8a"}
Sep 30 09:51:41 crc kubenswrapper[4730]: I0930 09:51:41.639485 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-ssdbb" event={"ID":"175ee2dd-3d4e-4554-b13b-de02e542c8a3","Type":"ContainerStarted","Data":"2f919e6231d96d143b911d0920adad274f7471b274a7c38269b773fc61d952d4"}
Sep 30 09:51:41 crc kubenswrapper[4730]: I0930 09:51:41.640714 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-cv6jk" podStartSLOduration=6.640697044 podStartE2EDuration="6.640697044s" podCreationTimestamp="2025-09-30 09:51:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 09:51:41.604652994 +0000 UTC m=+145.937912987" watchObservedRunningTime="2025-09-30 09:51:41.640697044 +0000 UTC m=+145.973957037"
Sep 30 09:51:41 crc kubenswrapper[4730]: I0930 09:51:41.662814 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-df648"
Sep 30 09:51:41 crc kubenswrapper[4730]: I0930 09:51:41.663180 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-r8tph"
Sep 30 09:51:41 crc kubenswrapper[4730]: I0930 09:51:41.702278 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 09:51:41 crc kubenswrapper[4730]: E0930 09:51:41.702765 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 09:51:42.202742305 +0000 UTC m=+146.536002298 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 09:51:41 crc kubenswrapper[4730]: I0930 09:51:41.804598 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xdzzr\" (UID: \"3f90d05f-7820-4af0-8894-6d63dc672f33\") " pod="openshift-image-registry/image-registry-697d97f7c8-xdzzr"
Sep 30 09:51:41 crc kubenswrapper[4730]: E0930 09:51:41.808067 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 09:51:42.308047966 +0000 UTC m=+146.641308159 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xdzzr" (UID: "3f90d05f-7820-4af0-8894-6d63dc672f33") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 09:51:41 crc kubenswrapper[4730]: I0930 09:51:41.906718 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 09:51:41 crc kubenswrapper[4730]: E0930 09:51:41.906843 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 09:51:42.406815485 +0000 UTC m=+146.740075478 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 09:51:41 crc kubenswrapper[4730]: I0930 09:51:41.907170 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xdzzr\" (UID: \"3f90d05f-7820-4af0-8894-6d63dc672f33\") " pod="openshift-image-registry/image-registry-697d97f7c8-xdzzr"
Sep 30 09:51:41 crc kubenswrapper[4730]: E0930 09:51:41.907755 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 09:51:42.407740041 +0000 UTC m=+146.741000034 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xdzzr" (UID: "3f90d05f-7820-4af0-8894-6d63dc672f33") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 09:51:42 crc kubenswrapper[4730]: I0930 09:51:42.008288 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 09:51:42 crc kubenswrapper[4730]: E0930 09:51:42.008728 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 09:51:42.508712221 +0000 UTC m=+146.841972214 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 09:51:42 crc kubenswrapper[4730]: I0930 09:51:42.110033 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xdzzr\" (UID: \"3f90d05f-7820-4af0-8894-6d63dc672f33\") " pod="openshift-image-registry/image-registry-697d97f7c8-xdzzr"
Sep 30 09:51:42 crc kubenswrapper[4730]: E0930 09:51:42.110374 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 09:51:42.61036314 +0000 UTC m=+146.943623133 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xdzzr" (UID: "3f90d05f-7820-4af0-8894-6d63dc672f33") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 09:51:42 crc kubenswrapper[4730]: I0930 09:51:42.211973 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 09:51:42 crc kubenswrapper[4730]: E0930 09:51:42.212662 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 09:51:42.712634468 +0000 UTC m=+147.045894461 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 09:51:42 crc kubenswrapper[4730]: I0930 09:51:42.246822 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vxqtl"
Sep 30 09:51:42 crc kubenswrapper[4730]: I0930 09:51:42.246903 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vxqtl"
Sep 30 09:51:42 crc kubenswrapper[4730]: I0930 09:51:42.269859 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vxqtl"
Sep 30 09:51:42 crc kubenswrapper[4730]: I0930 09:51:42.313531 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xdzzr\" (UID: \"3f90d05f-7820-4af0-8894-6d63dc672f33\") " pod="openshift-image-registry/image-registry-697d97f7c8-xdzzr"
Sep 30 09:51:42 crc kubenswrapper[4730]: E0930 09:51:42.314005 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 09:51:42.813989998 +0000 UTC m=+147.147249991 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xdzzr" (UID: "3f90d05f-7820-4af0-8894-6d63dc672f33") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 09:51:42 crc kubenswrapper[4730]: I0930 09:51:42.414103 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 09:51:42 crc kubenswrapper[4730]: E0930 09:51:42.414401 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 09:51:42.914380213 +0000 UTC m=+147.247640206 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 09:51:42 crc kubenswrapper[4730]: I0930 09:51:42.414698 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xdzzr\" (UID: \"3f90d05f-7820-4af0-8894-6d63dc672f33\") " pod="openshift-image-registry/image-registry-697d97f7c8-xdzzr"
Sep 30 09:51:42 crc kubenswrapper[4730]: E0930 09:51:42.415056 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 09:51:42.915044021 +0000 UTC m=+147.248304014 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xdzzr" (UID: "3f90d05f-7820-4af0-8894-6d63dc672f33") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 09:51:42 crc kubenswrapper[4730]: I0930 09:51:42.521325 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 09:51:42 crc kubenswrapper[4730]: E0930 09:51:42.521723 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 09:51:43.021682548 +0000 UTC m=+147.354942541 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 09:51:42 crc kubenswrapper[4730]: I0930 09:51:42.522026 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xdzzr\" (UID: \"3f90d05f-7820-4af0-8894-6d63dc672f33\") " pod="openshift-image-registry/image-registry-697d97f7c8-xdzzr"
Sep 30 09:51:42 crc kubenswrapper[4730]: E0930 09:51:42.522457 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 09:51:43.02244242 +0000 UTC m=+147.355702413 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xdzzr" (UID: "3f90d05f-7820-4af0-8894-6d63dc672f33") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 09:51:42 crc kubenswrapper[4730]: I0930 09:51:42.540209 4730 patch_prober.go:28] interesting pod/router-default-5444994796-m2sph container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Sep 30 09:51:42 crc kubenswrapper[4730]: [-]has-synced failed: reason withheld
Sep 30 09:51:42 crc kubenswrapper[4730]: [+]process-running ok
Sep 30 09:51:42 crc kubenswrapper[4730]: healthz check failed
Sep 30 09:51:42 crc kubenswrapper[4730]: I0930 09:51:42.540301 4730 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-m2sph" podUID="16c0279b-a0e3-4400-be1a-c485c8ea0a34" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Sep 30 09:51:42 crc kubenswrapper[4730]: I0930 09:51:42.623179 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 09:51:42 crc kubenswrapper[4730]: E0930 09:51:42.623593 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 09:51:43.123575155 +0000 UTC m=+147.456835148 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 09:51:42 crc kubenswrapper[4730]: I0930 09:51:42.681663 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-jj6x6" event={"ID":"f1f19ecd-c701-4a85-85a4-05f74f6d6f60","Type":"ContainerStarted","Data":"8150334408b045cc55e5f9c486d0a0f4ea85cf71bc67de14f2bea9b342265024"}
Sep 30 09:51:42 crc kubenswrapper[4730]: I0930 09:51:42.712652 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29320425-gwwdz" event={"ID":"47a443f7-7d61-447b-b119-29dcd51b1b18","Type":"ContainerStarted","Data":"e94bbc773fe8bfcdb493a02ccda8b8ce059c6ce0e8368296e2b13dd81a7d5f05"}
Sep 30 09:51:42 crc kubenswrapper[4730]: I0930 09:51:42.712947 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29320425-gwwdz" event={"ID":"47a443f7-7d61-447b-b119-29dcd51b1b18","Type":"ContainerStarted","Data":"2bc82827de0365291daa0923dad93d253caa2b928abce592e08a25d7800878c7"}
Sep 30 09:51:42 crc kubenswrapper[4730]: I0930 09:51:42.724864 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xdzzr\" (UID: \"3f90d05f-7820-4af0-8894-6d63dc672f33\") " pod="openshift-image-registry/image-registry-697d97f7c8-xdzzr"
Sep 30 09:51:42 crc kubenswrapper[4730]: I0930 09:51:42.725222 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-2psv2" event={"ID":"d3e12ca7-c78f-491d-bca2-3af653c9bf98","Type":"ContainerStarted","Data":"ad2d919408d6f5be4be05a5292607af26253232f085cd702556bdba0ecc20e99"}
Sep 30 09:51:42 crc kubenswrapper[4730]: E0930 09:51:42.725265 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 09:51:43.225252615 +0000 UTC m=+147.558512608 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xdzzr" (UID: "3f90d05f-7820-4af0-8894-6d63dc672f33") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 09:51:42 crc kubenswrapper[4730]: I0930 09:51:42.743995 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-nt7nq" event={"ID":"b437c48c-f825-4a51-9076-acf5dcd25e36","Type":"ContainerStarted","Data":"67a0db591a5ac5da6e2caa2d9b76dd4067616304a38ee48c82db2ccbb8028ec5"}
Sep 30 09:51:42 crc kubenswrapper[4730]: I0930 09:51:42.744906 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-nt7nq"
Sep 30 09:51:42 crc kubenswrapper[4730]: I0930 09:51:42.745879 4730 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-nt7nq container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.40:8080/healthz\": dial tcp 10.217.0.40:8080: connect: connection refused" start-of-body=
Sep 30 09:51:42 crc kubenswrapper[4730]: I0930 09:51:42.745921 4730 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-nt7nq" podUID="b437c48c-f825-4a51-9076-acf5dcd25e36" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.40:8080/healthz\": dial tcp 10.217.0.40:8080: connect: connection refused"
Sep 30 09:51:42 crc kubenswrapper[4730]: I0930 09:51:42.748736 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29320425-gwwdz" podStartSLOduration=125.748715305 podStartE2EDuration="2m5.748715305s" podCreationTimestamp="2025-09-30 09:49:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 09:51:42.746456913 +0000 UTC m=+147.079716916" watchObservedRunningTime="2025-09-30 09:51:42.748715305 +0000 UTC m=+147.081975298"
Sep 30 09:51:42 crc kubenswrapper[4730]: I0930 09:51:42.763892 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-vgdj9" event={"ID":"c7fc44ff-ad31-4a42-9946-a3285b56bafb","Type":"ContainerStarted","Data":"9127d1d5a0328460b6185a9bf9740b1affe1ff2dcf5a6d323d6ef80ba5ff935e"}
Sep 30 09:51:42 crc kubenswrapper[4730]: I0930 09:51:42.763953 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-vgdj9" event={"ID":"c7fc44ff-ad31-4a42-9946-a3285b56bafb","Type":"ContainerStarted","Data":"eaa21c8f154dc727d368904ac61cf2f37d77a9c134fb270264c66ed5106226f4"}
Sep 30 09:51:42 crc kubenswrapper[4730]: I0930 09:51:42.763969 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-vgdj9" event={"ID":"c7fc44ff-ad31-4a42-9946-a3285b56bafb","Type":"ContainerStarted","Data":"9c4e787c59c28f46e7b147ec5546238f81eef6e7342b34a7aad53d0015a62c1d"}
Sep 30 09:51:42 crc kubenswrapper[4730]: I0930 09:51:42.775400 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-nt7nq" podStartSLOduration=125.775379264 podStartE2EDuration="2m5.775379264s" podCreationTimestamp="2025-09-30 09:49:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 09:51:42.773997136 +0000 UTC m=+147.107257139" watchObservedRunningTime="2025-09-30 09:51:42.775379264 +0000 UTC m=+147.108639267"
Sep 30 09:51:42 crc kubenswrapper[4730]: I0930 09:51:42.789289 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-ssdbb" event={"ID":"175ee2dd-3d4e-4554-b13b-de02e542c8a3","Type":"ContainerStarted","Data":"1829e8da28040f212a6887bfd00c903035cd934c70b49fa08a4b97e31ce9f9e2"}
Sep 30 09:51:42 crc kubenswrapper[4730]: I0930 09:51:42.803678 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-gf4lh" event={"ID":"af4abbd6-d1b9-411d-9128-cc5b74a93eb5","Type":"ContainerStarted","Data":"a8c9385966db8c8234ac24a06f09080fd90b5000f93c2c54cc61657b1753573d"}
Sep 30 09:51:42 crc kubenswrapper[4730]: I0930 09:51:42.807786 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-gf4lh"
Sep 30 09:51:42 crc kubenswrapper[4730]: I0930 09:51:42.808709 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-2psv2" podStartSLOduration=7.808693679 podStartE2EDuration="7.808693679s" podCreationTimestamp="2025-09-30 09:51:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 09:51:42.807524556 +0000 UTC m=+147.140784549" watchObservedRunningTime="2025-09-30 09:51:42.808693679 +0000 UTC m=+147.141953672"
Sep 30 09:51:42 crc kubenswrapper[4730]: I0930 09:51:42.814650 4730 patch_prober.go:28] interesting pod/downloads-7954f5f757-gf4lh container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" start-of-body=
Sep 30 09:51:42 crc kubenswrapper[4730]: I0930 09:51:42.814746 4730 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-gf4lh" podUID="af4abbd6-d1b9-411d-9128-cc5b74a93eb5" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused"
Sep 30 09:51:42 crc kubenswrapper[4730]: I0930 09:51:42.832116 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 09:51:42 crc kubenswrapper[4730]: E0930 09:51:42.834030 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 09:51:43.334010091 +0000 UTC m=+147.667270084 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 09:51:42 crc kubenswrapper[4730]: I0930 09:51:42.846181 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-cjsxd" event={"ID":"800a0b26-7e9b-458b-95a9-703c7ace905f","Type":"ContainerStarted","Data":"19e532fa3044cfd5fbb8fc33c08e0804b86150565128f283bf17239cd9de63a6"}
Sep 30 09:51:42 crc kubenswrapper[4730]: I0930 09:51:42.869974 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-brcnh" event={"ID":"1445a993-db8f-4cdb-a89c-9f45a3ee0b4d","Type":"ContainerStarted","Data":"f58d8890ed37d414ebc41085e22ed2fc83796aa5254de2be5ee60dad72e59114"}
Sep 30 09:51:42 crc kubenswrapper[4730]: I0930 09:51:42.870020 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-brcnh" event={"ID":"1445a993-db8f-4cdb-a89c-9f45a3ee0b4d","Type":"ContainerStarted","Data":"bcb51663d145436764087b8f0631a92d1c9d61501d85ec47e4ccaef37ddd3a22"}
Sep 30 09:51:42 crc kubenswrapper[4730]: I0930 09:51:42.876238 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-v6vtc" event={"ID":"cdca31a9-3f6d-402a-9a77-2e5156016199","Type":"ContainerStarted","Data":"2e730208c78512a648a44fb727876a028751e2fb41c6cec8095d34e0be95accf"}
Sep 30 09:51:42 crc kubenswrapper[4730]: I0930 09:51:42.882877 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-gf4lh" podStartSLOduration=125.882855235 podStartE2EDuration="2m5.882855235s" podCreationTimestamp="2025-09-30 09:49:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 09:51:42.870768681 +0000 UTC m=+147.204028674" watchObservedRunningTime="2025-09-30 09:51:42.882855235 +0000 UTC m=+147.216115238"
Sep 30 09:51:42 crc kubenswrapper[4730]: I0930 09:51:42.909536 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9ckzv" event={"ID":"a3cec9a5-601c-4ec9-a147-88a53c19de69","Type":"ContainerStarted","Data":"aada0d00132de2aa40d1efb8f2b2350c33047f298f2c458c9d7212513fe4ef65"}
Sep 30 09:51:42 crc kubenswrapper[4730]: I0930 09:51:42.909600 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9ckzv" event={"ID":"a3cec9a5-601c-4ec9-a147-88a53c19de69","Type":"ContainerStarted","Data":"0c57baa293ecc0dc038df90b4702cf7fece7f35a47995b36b314955b75558c08"}
Sep 30 09:51:42 crc kubenswrapper[4730]: I0930 09:51:42.922709 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-b829k" event={"ID":"c8aaf03c-1a0b-4346-99be-5367b88685bd","Type":"ContainerStarted","Data":"c30d38ac1e44e1a79d75fbf6e7de00358ea582dd8d3cb2110d2bc9d3d68633dd"}
Sep 30 09:51:42 crc kubenswrapper[4730]: I0930 09:51:42.922768 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-b829k"
Sep 30 09:51:42 crc kubenswrapper[4730]: I0930 09:51:42.926264 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-ssdbb" podStartSLOduration=124.926241939 podStartE2EDuration="2m4.926241939s" podCreationTimestamp="2025-09-30 09:49:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 09:51:42.915195512 +0000 UTC m=+147.248455505" watchObservedRunningTime="2025-09-30 09:51:42.926241939 +0000 UTC m=+147.259501932"
Sep 30 09:51:42 crc kubenswrapper[4730]: I0930 09:51:42.934819 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xdzzr\" (UID: \"3f90d05f-7820-4af0-8894-6d63dc672f33\") " pod="openshift-image-registry/image-registry-697d97f7c8-xdzzr"
Sep 30 09:51:42 crc kubenswrapper[4730]: E0930 09:51:42.936001 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 09:51:43.435989609 +0000 UTC m=+147.769249602 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xdzzr" (UID: "3f90d05f-7820-4af0-8894-6d63dc672f33") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 09:51:42 crc kubenswrapper[4730]: I0930 09:51:42.972553 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-vgdj9" podStartSLOduration=125.972308177 podStartE2EDuration="2m5.972308177s" podCreationTimestamp="2025-09-30 09:49:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 09:51:42.96522141 +0000 UTC m=+147.298481403" watchObservedRunningTime="2025-09-30 09:51:42.972308177 +0000 UTC m=+147.305568170"
Sep 30 09:51:42 crc kubenswrapper[4730]: I0930 09:51:42.972766 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-cffv2" event={"ID":"ba9ae093-0fce-4e5d-a1c9-f199cb2ba008","Type":"ContainerStarted","Data":"8f9733c238befc42382524bf3585a295096042fb8153cb13a81ac5c8f07c26ea"}
Sep 30 09:51:42 crc kubenswrapper[4730]: I0930 09:51:42.976347 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-cffv2"
Sep 30 09:51:42 crc kubenswrapper[4730]: I0930 09:51:42.984240 4730 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-cffv2 container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.33:8443/healthz\": dial tcp 10.217.0.33:8443: connect: connection refused" start-of-body=
Sep 30 09:51:42 crc kubenswrapper[4730]: I0930 09:51:42.984309 4730 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-cffv2" podUID="ba9ae093-0fce-4e5d-a1c9-f199cb2ba008" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.33:8443/healthz\": dial tcp 10.217.0.33:8443: connect: connection refused"
Sep 30 09:51:43 crc kubenswrapper[4730]: I0930 09:51:43.002980 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-27l2q" event={"ID":"eb3285d0-7f84-46d0-9c21-136e077b813a","Type":"ContainerStarted","Data":"5c01c606406fa0a8dd3050d85ff2e7d30a978ac29636c51f9fdb49feb0a92f1f"}
Sep 30 09:51:43 crc kubenswrapper[4730]: I0930 09:51:43.010353 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-brcnh" podStartSLOduration=126.010332491 podStartE2EDuration="2m6.010332491s" podCreationTimestamp="2025-09-30 09:49:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 09:51:43.008921562 +0000 UTC m=+147.342181555" watchObservedRunningTime="2025-09-30 09:51:43.010332491 +0000 UTC m=+147.343592484"
Sep 30 09:51:43 crc kubenswrapper[4730]: I0930 09:51:43.029049 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-wqstr" event={"ID":"435a6f87-b787-4c31-b41f-8013e1aaae11","Type":"ContainerStarted","Data":"5c1e7d9d89ac1d25171da75151aae47608fc4d6d3e96059c0c7301e78499babd"}
Sep 30 09:51:43 crc kubenswrapper[4730]: I0930 09:51:43.029118 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-wqstr" event={"ID":"435a6f87-b787-4c31-b41f-8013e1aaae11","Type":"ContainerStarted","Data":"9524dca421c1bc5a270c9e70e0f4bbb55e1adacb20007422b735344d54b09645"}
Sep 30 09:51:43 crc kubenswrapper[4730]: I0930 09:51:43.029826 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-wqstr"
Sep 30 09:51:43 crc kubenswrapper[4730]: I0930 09:51:43.036191 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 09:51:43 crc kubenswrapper[4730]: E0930 09:51:43.040474 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 09:51:43.540457587 +0000 UTC m=+147.873717580 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 09:51:43 crc kubenswrapper[4730]: I0930 09:51:43.065963 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-f4hwj" event={"ID":"e15284e8-6f97-47e8-a08b-ad3d3a87de94","Type":"ContainerStarted","Data":"558b39b5deb35f6767bd6517aa288f9abeb02042f394e21771b0553eef14b1cf"} Sep 30 09:51:43 crc kubenswrapper[4730]: I0930 09:51:43.066995 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-b829k" podStartSLOduration=126.066979673 podStartE2EDuration="2m6.066979673s" podCreationTimestamp="2025-09-30 09:49:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 09:51:43.066816998 +0000 UTC m=+147.400076991" watchObservedRunningTime="2025-09-30 09:51:43.066979673 +0000 UTC m=+147.400239666" Sep 30 09:51:43 crc kubenswrapper[4730]: I0930 09:51:43.094043 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-cxj9q" event={"ID":"555ceaf9-a5fa-41b9-ba91-92bd420e0c1b","Type":"ContainerStarted","Data":"14413999acce16be891212791800822bae466050740aa02e07089c36518bdf0b"} Sep 30 09:51:43 crc kubenswrapper[4730]: I0930 09:51:43.128437 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-jkmzt" event={"ID":"778ecda6-96a8-46f1-89e5-79c372bfc123","Type":"ContainerStarted","Data":"c93e67375bba09e381dfb91fd13ef208cf56c7cdf51d3e5bff6f98743090aa6b"} Sep 30 09:51:43 crc kubenswrapper[4730]: I0930 09:51:43.136675 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-9ckzv" podStartSLOduration=126.136645384 podStartE2EDuration="2m6.136645384s" podCreationTimestamp="2025-09-30 09:49:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 09:51:43.134505805 +0000 UTC m=+147.467765798" watchObservedRunningTime="2025-09-30 09:51:43.136645384 +0000 UTC m=+147.469905377" Sep 30 09:51:43 crc kubenswrapper[4730]: I0930 09:51:43.137404 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xdzzr\" (UID: \"3f90d05f-7820-4af0-8894-6d63dc672f33\") " pod="openshift-image-registry/image-registry-697d97f7c8-xdzzr" Sep 30 09:51:43 crc kubenswrapper[4730]: E0930 09:51:43.139003 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 09:51:43.638983549 +0000 UTC m=+147.972243612 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xdzzr" (UID: "3f90d05f-7820-4af0-8894-6d63dc672f33") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 09:51:43 crc kubenswrapper[4730]: I0930 09:51:43.167009 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-r8pgh" event={"ID":"4658f792-b8d4-4b22-92b7-46f6c8944eba","Type":"ContainerStarted","Data":"d04000ad342e6a852e6b7101f142bd8ecd5b051907501cc648fcd4047dc3614e"} Sep 30 09:51:43 crc kubenswrapper[4730]: I0930 09:51:43.167056 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-r8pgh" event={"ID":"4658f792-b8d4-4b22-92b7-46f6c8944eba","Type":"ContainerStarted","Data":"2131c48eafb7076d0ab11ab0ee771e91df69e67ac2dd94df7fbf90f014b9078c"} Sep 30 09:51:43 crc kubenswrapper[4730]: I0930 09:51:43.224844 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-v86bf" event={"ID":"9297e80b-31ef-46af-98dc-8770fb66a889","Type":"ContainerStarted","Data":"3733f03afc14cab0d989e91dfa6488cc4166903f3cfad0d2d6486db0a22f641e"} Sep 30 09:51:43 crc kubenswrapper[4730]: I0930 09:51:43.224892 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-v86bf" event={"ID":"9297e80b-31ef-46af-98dc-8770fb66a889","Type":"ContainerStarted","Data":"4537ac7bd517b132148e2ad5f0b7a7d9cde107b4d21efc02597ca10e3513ade6"} Sep 30 09:51:43 crc kubenswrapper[4730]: I0930 09:51:43.231177 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-v6vtc" podStartSLOduration=126.231161896 podStartE2EDuration="2m6.231161896s" podCreationTimestamp="2025-09-30 09:49:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 09:51:43.216008556 +0000 UTC m=+147.549268549" watchObservedRunningTime="2025-09-30 09:51:43.231161896 +0000 UTC m=+147.564421889" Sep 30 09:51:43 crc kubenswrapper[4730]: I0930 09:51:43.233224 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-fnbq4"] Sep 30 09:51:43 crc kubenswrapper[4730]: I0930 09:51:43.238729 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 09:51:43 crc kubenswrapper[4730]: E0930 09:51:43.238865 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 09:51:43.738842179 +0000 UTC m=+148.072102192 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 09:51:43 crc kubenswrapper[4730]: I0930 09:51:43.239208 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xdzzr\" (UID: \"3f90d05f-7820-4af0-8894-6d63dc672f33\") " pod="openshift-image-registry/image-registry-697d97f7c8-xdzzr" Sep 30 09:51:43 crc kubenswrapper[4730]: E0930 09:51:43.240999 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 09:51:43.740990529 +0000 UTC m=+148.074250522 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xdzzr" (UID: "3f90d05f-7820-4af0-8894-6d63dc672f33") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 09:51:43 crc kubenswrapper[4730]: I0930 09:51:43.246955 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-9x2qr" event={"ID":"21ce9670-1b8d-4896-a33b-b6dc5125dbdc","Type":"ContainerStarted","Data":"f6db091943505385aa763b9938f6c1f38462c60b317c8fa688d8ee7bbd31921c"} Sep 30 09:51:43 crc kubenswrapper[4730]: I0930 09:51:43.246981 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-fnbq4" Sep 30 09:51:43 crc kubenswrapper[4730]: I0930 09:51:43.254011 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-sfwd4" event={"ID":"b601ae3e-520b-43c1-8c4d-3946b0fb7cff","Type":"ContainerStarted","Data":"84bf8119f2da87c7b06d24729954073495c73efa84c2bca4c70b9ae20e4bade3"} Sep 30 09:51:43 crc kubenswrapper[4730]: I0930 09:51:43.256424 4730 generic.go:334] "Generic (PLEG): container finished" podID="3970c1fd-c1a9-40a4-a2b5-276df544f222" containerID="6fd5bfc4611b4d5ce043248cc06c1d394b1fd36764ad6f79d4e2a8bcedd764fd" exitCode=0 Sep 30 09:51:43 crc kubenswrapper[4730]: I0930 09:51:43.256711 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-75wjk" event={"ID":"3970c1fd-c1a9-40a4-a2b5-276df544f222","Type":"ContainerDied","Data":"6fd5bfc4611b4d5ce043248cc06c1d394b1fd36764ad6f79d4e2a8bcedd764fd"} Sep 30 09:51:43 crc kubenswrapper[4730]: I0930 09:51:43.269940 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Sep 30 09:51:43 crc kubenswrapper[4730]: I0930 09:51:43.290516 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-fnbq4"] Sep 30 09:51:43 crc kubenswrapper[4730]: I0930 09:51:43.297833 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-xbf9b" event={"ID":"27d3e752-f501-41c6-aed5-aa0e58103802","Type":"ContainerStarted","Data":"154aecd1316467642b69eaf31406ea160a46d5cdecc985e96810514880c6e203"} Sep 30 09:51:43 crc kubenswrapper[4730]: I0930 09:51:43.297877 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-xbf9b" event={"ID":"27d3e752-f501-41c6-aed5-aa0e58103802","Type":"ContainerStarted","Data":"dc79989451a420a4405a4a61d03680f31c512dfdd68b3d6f98bd30d1bf7e30f3"} Sep 30 09:51:43 crc kubenswrapper[4730]: I0930 09:51:43.298100 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-f4hwj" podStartSLOduration=125.298077142 podStartE2EDuration="2m5.298077142s" podCreationTimestamp="2025-09-30 09:49:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 09:51:43.286564503 +0000 UTC m=+147.619824496" watchObservedRunningTime="2025-09-30 09:51:43.298077142 +0000 UTC m=+147.631337135" Sep 30 09:51:43 crc kubenswrapper[4730]: I0930 09:51:43.319394 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-8qhvr"] Sep 30 09:51:43 crc kubenswrapper[4730]: I0930 09:51:43.320404 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-8qhvr" Sep 30 09:51:43 crc kubenswrapper[4730]: I0930 09:51:43.326251 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-tdl79" event={"ID":"503191a0-1fb3-4b1e-84f1-ac3d702f686e","Type":"ContainerStarted","Data":"c9c0e44f5f8438ed8dee8db20135a184993179c651db81ebea6d7b613fbf6b95"} Sep 30 09:51:43 crc kubenswrapper[4730]: I0930 09:51:43.350053 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Sep 30 09:51:43 crc kubenswrapper[4730]: I0930 09:51:43.350800 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 09:51:43 crc kubenswrapper[4730]: I0930 09:51:43.351510 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q4xfm\" (UniqueName: \"kubernetes.io/projected/9ea0a817-d369-46fc-9e35-ab227abcbf25-kube-api-access-q4xfm\") pod \"community-operators-fnbq4\" (UID: \"9ea0a817-d369-46fc-9e35-ab227abcbf25\") " pod="openshift-marketplace/community-operators-fnbq4" Sep 30 09:51:43 crc kubenswrapper[4730]: I0930 09:51:43.351541 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9ea0a817-d369-46fc-9e35-ab227abcbf25-catalog-content\") pod \"community-operators-fnbq4\" (UID: \"9ea0a817-d369-46fc-9e35-ab227abcbf25\") " pod="openshift-marketplace/community-operators-fnbq4" Sep 30 09:51:43 crc kubenswrapper[4730]: I0930 09:51:43.351569 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9ea0a817-d369-46fc-9e35-ab227abcbf25-utilities\") pod \"community-operators-fnbq4\" (UID: \"9ea0a817-d369-46fc-9e35-ab227abcbf25\") " pod="openshift-marketplace/community-operators-fnbq4" Sep 30 09:51:43 crc kubenswrapper[4730]: E0930 09:51:43.353421 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 09:51:43.853367195 +0000 UTC m=+148.186627188 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 09:51:43 crc kubenswrapper[4730]: I0930 09:51:43.359228 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-hlvdp" Sep 30 09:51:43 crc kubenswrapper[4730]: I0930 09:51:43.373373 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-cxj9q" podStartSLOduration=126.37335831 podStartE2EDuration="2m6.37335831s" podCreationTimestamp="2025-09-30 09:49:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 09:51:43.371126018 +0000 UTC m=+147.704386011" watchObservedRunningTime="2025-09-30 09:51:43.37335831 +0000 UTC m=+147.706618303" Sep 30 09:51:43 crc kubenswrapper[4730]: I0930 09:51:43.381372 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vxqtl" Sep 30 09:51:43 crc kubenswrapper[4730]: I0930 09:51:43.390658 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-8qhvr"] Sep 30 09:51:43 crc kubenswrapper[4730]: I0930 09:51:43.417055 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-cffv2" podStartSLOduration=126.417027601 podStartE2EDuration="2m6.417027601s" podCreationTimestamp="2025-09-30 09:49:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 09:51:43.415438747 +0000 UTC m=+147.748698740" watchObservedRunningTime="2025-09-30 09:51:43.417027601 +0000 UTC m=+147.750287594" Sep 30 09:51:43 crc kubenswrapper[4730]: I0930 09:51:43.459363 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-27l2q" podStartSLOduration=126.459348475 podStartE2EDuration="2m6.459348475s" podCreationTimestamp="2025-09-30 09:49:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 09:51:43.457952156 +0000 UTC m=+147.791212149" watchObservedRunningTime="2025-09-30 09:51:43.459348475 +0000 UTC m=+147.792608468" Sep 30 09:51:43 crc kubenswrapper[4730]: I0930 09:51:43.460122 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cd78d8d1-8617-4c3d-9205-abd8bbdde710-utilities\") pod \"certified-operators-8qhvr\" (UID: \"cd78d8d1-8617-4c3d-9205-abd8bbdde710\") " pod="openshift-marketplace/certified-operators-8qhvr" Sep 30 09:51:43 crc kubenswrapper[4730]: I0930 09:51:43.460431 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q4xfm\" (UniqueName: \"kubernetes.io/projected/9ea0a817-d369-46fc-9e35-ab227abcbf25-kube-api-access-q4xfm\") pod \"community-operators-fnbq4\" (UID: 
\"9ea0a817-d369-46fc-9e35-ab227abcbf25\") " pod="openshift-marketplace/community-operators-fnbq4" Sep 30 09:51:43 crc kubenswrapper[4730]: I0930 09:51:43.460510 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9ea0a817-d369-46fc-9e35-ab227abcbf25-catalog-content\") pod \"community-operators-fnbq4\" (UID: \"9ea0a817-d369-46fc-9e35-ab227abcbf25\") " pod="openshift-marketplace/community-operators-fnbq4" Sep 30 09:51:43 crc kubenswrapper[4730]: I0930 09:51:43.460533 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9ea0a817-d369-46fc-9e35-ab227abcbf25-utilities\") pod \"community-operators-fnbq4\" (UID: \"9ea0a817-d369-46fc-9e35-ab227abcbf25\") " pod="openshift-marketplace/community-operators-fnbq4" Sep 30 09:51:43 crc kubenswrapper[4730]: I0930 09:51:43.460594 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-84k94\" (UniqueName: \"kubernetes.io/projected/cd78d8d1-8617-4c3d-9205-abd8bbdde710-kube-api-access-84k94\") pod \"certified-operators-8qhvr\" (UID: \"cd78d8d1-8617-4c3d-9205-abd8bbdde710\") " pod="openshift-marketplace/certified-operators-8qhvr" Sep 30 09:51:43 crc kubenswrapper[4730]: I0930 09:51:43.460970 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xdzzr\" (UID: \"3f90d05f-7820-4af0-8894-6d63dc672f33\") " pod="openshift-image-registry/image-registry-697d97f7c8-xdzzr" Sep 30 09:51:43 crc kubenswrapper[4730]: I0930 09:51:43.461114 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cd78d8d1-8617-4c3d-9205-abd8bbdde710-catalog-content\") pod \"certified-operators-8qhvr\" (UID: \"cd78d8d1-8617-4c3d-9205-abd8bbdde710\") " pod="openshift-marketplace/certified-operators-8qhvr" Sep 30 09:51:43 crc kubenswrapper[4730]: E0930 09:51:43.465118 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 09:51:43.965104524 +0000 UTC m=+148.298364517 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xdzzr" (UID: "3f90d05f-7820-4af0-8894-6d63dc672f33") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 09:51:43 crc kubenswrapper[4730]: I0930 09:51:43.486072 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9ea0a817-d369-46fc-9e35-ab227abcbf25-utilities\") pod \"community-operators-fnbq4\" (UID: \"9ea0a817-d369-46fc-9e35-ab227abcbf25\") " pod="openshift-marketplace/community-operators-fnbq4" Sep 30 09:51:43 crc kubenswrapper[4730]: I0930 09:51:43.494579 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-wqstr" podStartSLOduration=126.494563121 podStartE2EDuration="2m6.494563121s" podCreationTimestamp="2025-09-30 09:49:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 09:51:43.493161832 +0000 UTC m=+147.826421825" watchObservedRunningTime="2025-09-30 09:51:43.494563121 +0000 UTC m=+147.827823114" Sep 30 09:51:43 crc kubenswrapper[4730]: I0930 09:51:43.512096 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9ea0a817-d369-46fc-9e35-ab227abcbf25-catalog-content\") pod \"community-operators-fnbq4\" (UID: \"9ea0a817-d369-46fc-9e35-ab227abcbf25\") " pod="openshift-marketplace/community-operators-fnbq4" Sep 30 09:51:43 crc kubenswrapper[4730]: I0930 09:51:43.547832 4730 patch_prober.go:28] interesting pod/router-default-5444994796-m2sph container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 30 09:51:43 crc kubenswrapper[4730]: [-]has-synced failed: reason withheld Sep 30 09:51:43 crc kubenswrapper[4730]: [+]process-running ok Sep 30 09:51:43 crc kubenswrapper[4730]: healthz check failed Sep 30 09:51:43 crc kubenswrapper[4730]: I0930 09:51:43.550371 4730 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-m2sph" podUID="16c0279b-a0e3-4400-be1a-c485c8ea0a34" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 30 09:51:43 crc kubenswrapper[4730]: I0930 09:51:43.567358 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 09:51:43 crc kubenswrapper[4730]: I0930 09:51:43.567682 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-84k94\" (UniqueName: \"kubernetes.io/projected/cd78d8d1-8617-4c3d-9205-abd8bbdde710-kube-api-access-84k94\") pod \"certified-operators-8qhvr\" (UID: \"cd78d8d1-8617-4c3d-9205-abd8bbdde710\") " pod="openshift-marketplace/certified-operators-8qhvr" Sep 30 09:51:43 crc kubenswrapper[4730]: I0930 09:51:43.567798 4730 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cd78d8d1-8617-4c3d-9205-abd8bbdde710-catalog-content\") pod \"certified-operators-8qhvr\" (UID: \"cd78d8d1-8617-4c3d-9205-abd8bbdde710\") " pod="openshift-marketplace/certified-operators-8qhvr" Sep 30 09:51:43 crc kubenswrapper[4730]: I0930 09:51:43.567833 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cd78d8d1-8617-4c3d-9205-abd8bbdde710-utilities\") pod \"certified-operators-8qhvr\" (UID: \"cd78d8d1-8617-4c3d-9205-abd8bbdde710\") " pod="openshift-marketplace/certified-operators-8qhvr" Sep 30 09:51:43 crc kubenswrapper[4730]: I0930 09:51:43.568405 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cd78d8d1-8617-4c3d-9205-abd8bbdde710-utilities\") pod \"certified-operators-8qhvr\" (UID: \"cd78d8d1-8617-4c3d-9205-abd8bbdde710\") " pod="openshift-marketplace/certified-operators-8qhvr" Sep 30 09:51:43 crc kubenswrapper[4730]: E0930 09:51:43.568509 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 09:51:44.068488332 +0000 UTC m=+148.401748325 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 09:51:43 crc kubenswrapper[4730]: I0930 09:51:43.571830 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q4xfm\" (UniqueName: \"kubernetes.io/projected/9ea0a817-d369-46fc-9e35-ab227abcbf25-kube-api-access-q4xfm\") pod \"community-operators-fnbq4\" (UID: \"9ea0a817-d369-46fc-9e35-ab227abcbf25\") " pod="openshift-marketplace/community-operators-fnbq4" Sep 30 09:51:43 crc kubenswrapper[4730]: I0930 09:51:43.574258 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-fnbq4" Sep 30 09:51:43 crc kubenswrapper[4730]: I0930 09:51:43.579698 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cd78d8d1-8617-4c3d-9205-abd8bbdde710-catalog-content\") pod \"certified-operators-8qhvr\" (UID: \"cd78d8d1-8617-4c3d-9205-abd8bbdde710\") " pod="openshift-marketplace/certified-operators-8qhvr" Sep 30 09:51:43 crc kubenswrapper[4730]: I0930 09:51:43.581260 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-v67gm"] Sep 30 09:51:43 crc kubenswrapper[4730]: I0930 09:51:43.582535 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-v67gm" Sep 30 09:51:43 crc kubenswrapper[4730]: I0930 09:51:43.583684 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-v67gm"] Sep 30 09:51:43 crc kubenswrapper[4730]: I0930 09:51:43.630437 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-84k94\" (UniqueName: \"kubernetes.io/projected/cd78d8d1-8617-4c3d-9205-abd8bbdde710-kube-api-access-84k94\") pod \"certified-operators-8qhvr\" (UID: \"cd78d8d1-8617-4c3d-9205-abd8bbdde710\") " pod="openshift-marketplace/certified-operators-8qhvr" Sep 30 09:51:43 crc kubenswrapper[4730]: I0930 09:51:43.657958 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-v86bf" podStartSLOduration=126.657934233 podStartE2EDuration="2m6.657934233s" podCreationTimestamp="2025-09-30 09:49:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 09:51:43.655250358 +0000 UTC m=+147.988510351" watchObservedRunningTime="2025-09-30 09:51:43.657934233 +0000 UTC m=+147.991194236" Sep 30 09:51:43 crc kubenswrapper[4730]: I0930 09:51:43.669496 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/30fe799a-bbf1-4e62-8187-353da7ca9930-catalog-content\") pod \"community-operators-v67gm\" (UID: \"30fe799a-bbf1-4e62-8187-353da7ca9930\") " pod="openshift-marketplace/community-operators-v67gm" Sep 30 09:51:43 crc kubenswrapper[4730]: I0930 09:51:43.669558 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xjprn\" (UniqueName: \"kubernetes.io/projected/30fe799a-bbf1-4e62-8187-353da7ca9930-kube-api-access-xjprn\") pod \"community-operators-v67gm\" (UID: \"30fe799a-bbf1-4e62-8187-353da7ca9930\") " pod="openshift-marketplace/community-operators-v67gm" Sep 30 09:51:43 crc kubenswrapper[4730]: I0930 09:51:43.669591 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/30fe799a-bbf1-4e62-8187-353da7ca9930-utilities\") pod \"community-operators-v67gm\" (UID: \"30fe799a-bbf1-4e62-8187-353da7ca9930\") " pod="openshift-marketplace/community-operators-v67gm" Sep 30 09:51:43 crc kubenswrapper[4730]: I0930 09:51:43.669648 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xdzzr\" (UID: \"3f90d05f-7820-4af0-8894-6d63dc672f33\") " pod="openshift-image-registry/image-registry-697d97f7c8-xdzzr" Sep 30 09:51:43 crc kubenswrapper[4730]: E0930 09:51:43.670108 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 09:51:44.170091419 +0000 UTC m=+148.503351412 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xdzzr" (UID: "3f90d05f-7820-4af0-8894-6d63dc672f33") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 09:51:43 crc kubenswrapper[4730]: I0930 09:51:43.679278 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-8qhvr" Sep 30 09:51:43 crc kubenswrapper[4730]: I0930 09:51:43.722179 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-fbd6s"] Sep 30 09:51:43 crc kubenswrapper[4730]: I0930 09:51:43.723228 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-fbd6s" Sep 30 09:51:43 crc kubenswrapper[4730]: I0930 09:51:43.749877 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-fbd6s"] Sep 30 09:51:43 crc kubenswrapper[4730]: I0930 09:51:43.756710 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-xbf9b" podStartSLOduration=126.756682951 podStartE2EDuration="2m6.756682951s" podCreationTimestamp="2025-09-30 09:49:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 09:51:43.746969852 +0000 UTC m=+148.080229845" watchObservedRunningTime="2025-09-30 09:51:43.756682951 +0000 UTC m=+148.089942944" Sep 30 09:51:43 crc kubenswrapper[4730]: I0930 09:51:43.776152 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 09:51:43 crc kubenswrapper[4730]: E0930 09:51:43.777730 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 09:51:44.277707644 +0000 UTC m=+148.610967647 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 09:51:43 crc kubenswrapper[4730]: I0930 09:51:43.777811 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/30fe799a-bbf1-4e62-8187-353da7ca9930-catalog-content\") pod \"community-operators-v67gm\" (UID: \"30fe799a-bbf1-4e62-8187-353da7ca9930\") " pod="openshift-marketplace/community-operators-v67gm" Sep 30 09:51:43 crc kubenswrapper[4730]: I0930 09:51:43.777857 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xjprn\" (UniqueName: \"kubernetes.io/projected/30fe799a-bbf1-4e62-8187-353da7ca9930-kube-api-access-xjprn\") pod \"community-operators-v67gm\" (UID: \"30fe799a-bbf1-4e62-8187-353da7ca9930\") " pod="openshift-marketplace/community-operators-v67gm" Sep 30 09:51:43 crc kubenswrapper[4730]: I0930 09:51:43.777894 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6fe7d185-5cd3-4de1-a842-065620d27fdf-utilities\") pod \"certified-operators-fbd6s\" (UID: \"6fe7d185-5cd3-4de1-a842-065620d27fdf\") " pod="openshift-marketplace/certified-operators-fbd6s" Sep 30 09:51:43 crc kubenswrapper[4730]: I0930 09:51:43.777935 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/30fe799a-bbf1-4e62-8187-353da7ca9930-utilities\") pod \"community-operators-v67gm\" (UID: \"30fe799a-bbf1-4e62-8187-353da7ca9930\") " pod="openshift-marketplace/community-operators-v67gm" Sep 30 09:51:43 crc kubenswrapper[4730]: I0930 09:51:43.778014 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xdzzr\" (UID: \"3f90d05f-7820-4af0-8894-6d63dc672f33\") " pod="openshift-image-registry/image-registry-697d97f7c8-xdzzr" Sep 30 09:51:43 crc kubenswrapper[4730]: I0930 09:51:43.778078 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gl7dn\" (UniqueName: \"kubernetes.io/projected/6fe7d185-5cd3-4de1-a842-065620d27fdf-kube-api-access-gl7dn\") pod \"certified-operators-fbd6s\" (UID: \"6fe7d185-5cd3-4de1-a842-065620d27fdf\") " pod="openshift-marketplace/certified-operators-fbd6s" Sep 30 09:51:43 crc kubenswrapper[4730]: I0930 09:51:43.778113 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6fe7d185-5cd3-4de1-a842-065620d27fdf-catalog-content\") pod \"certified-operators-fbd6s\" (UID: \"6fe7d185-5cd3-4de1-a842-065620d27fdf\") " pod="openshift-marketplace/certified-operators-fbd6s" Sep 30 09:51:43 crc kubenswrapper[4730]: I0930 09:51:43.781579 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-sfwd4" podStartSLOduration=126.781563801 podStartE2EDuration="2m6.781563801s" podCreationTimestamp="2025-09-30 09:49:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 09:51:43.779890175 +0000 UTC m=+148.113150178" watchObservedRunningTime="2025-09-30 09:51:43.781563801 +0000 UTC m=+148.114823794" Sep 30 09:51:43 crc kubenswrapper[4730]: E0930 09:51:43.782170 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 09:51:44.282150608 +0000 UTC m=+148.615410601 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xdzzr" (UID: "3f90d05f-7820-4af0-8894-6d63dc672f33") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 09:51:43 crc kubenswrapper[4730]: I0930 09:51:43.790911 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/30fe799a-bbf1-4e62-8187-353da7ca9930-catalog-content\") pod \"community-operators-v67gm\" (UID: \"30fe799a-bbf1-4e62-8187-353da7ca9930\") " pod="openshift-marketplace/community-operators-v67gm" Sep 30 09:51:43 crc kubenswrapper[4730]: I0930 09:51:43.792286 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/30fe799a-bbf1-4e62-8187-353da7ca9930-utilities\") pod \"community-operators-v67gm\" (UID: \"30fe799a-bbf1-4e62-8187-353da7ca9930\") " pod="openshift-marketplace/community-operators-v67gm" Sep 30 09:51:43 crc kubenswrapper[4730]: I0930 09:51:43.832567 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xjprn\" (UniqueName: \"kubernetes.io/projected/30fe799a-bbf1-4e62-8187-353da7ca9930-kube-api-access-xjprn\") pod \"community-operators-v67gm\" (UID: \"30fe799a-bbf1-4e62-8187-353da7ca9930\") " pod="openshift-marketplace/community-operators-v67gm" Sep 30 09:51:43 crc kubenswrapper[4730]: I0930 09:51:43.886177 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 09:51:43 crc kubenswrapper[4730]: I0930 09:51:43.886408 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gl7dn\" (UniqueName: \"kubernetes.io/projected/6fe7d185-5cd3-4de1-a842-065620d27fdf-kube-api-access-gl7dn\") pod \"certified-operators-fbd6s\" (UID: \"6fe7d185-5cd3-4de1-a842-065620d27fdf\") " pod="openshift-marketplace/certified-operators-fbd6s" Sep 30 09:51:43 crc kubenswrapper[4730]: I0930 09:51:43.886448 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6fe7d185-5cd3-4de1-a842-065620d27fdf-catalog-content\") pod \"certified-operators-fbd6s\" 
(UID: \"6fe7d185-5cd3-4de1-a842-065620d27fdf\") " pod="openshift-marketplace/certified-operators-fbd6s" Sep 30 09:51:43 crc kubenswrapper[4730]: I0930 09:51:43.886532 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6fe7d185-5cd3-4de1-a842-065620d27fdf-utilities\") pod \"certified-operators-fbd6s\" (UID: \"6fe7d185-5cd3-4de1-a842-065620d27fdf\") " pod="openshift-marketplace/certified-operators-fbd6s" Sep 30 09:51:43 crc kubenswrapper[4730]: I0930 09:51:43.887044 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6fe7d185-5cd3-4de1-a842-065620d27fdf-utilities\") pod \"certified-operators-fbd6s\" (UID: \"6fe7d185-5cd3-4de1-a842-065620d27fdf\") " pod="openshift-marketplace/certified-operators-fbd6s" Sep 30 09:51:43 crc kubenswrapper[4730]: E0930 09:51:43.887148 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 09:51:44.387127029 +0000 UTC m=+148.720387022 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 09:51:43 crc kubenswrapper[4730]: I0930 09:51:43.887724 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6fe7d185-5cd3-4de1-a842-065620d27fdf-catalog-content\") pod \"certified-operators-fbd6s\" (UID: \"6fe7d185-5cd3-4de1-a842-065620d27fdf\") " pod="openshift-marketplace/certified-operators-fbd6s" Sep 30 09:51:43 crc kubenswrapper[4730]: I0930 09:51:43.905725 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-v67gm" Sep 30 09:51:43 crc kubenswrapper[4730]: I0930 09:51:43.923374 4730 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-b829k container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.25:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Sep 30 09:51:43 crc kubenswrapper[4730]: I0930 09:51:43.923462 4730 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-b829k" podUID="c8aaf03c-1a0b-4346-99be-5367b88685bd" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.25:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Sep 30 09:51:43 crc kubenswrapper[4730]: I0930 09:51:43.941472 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gl7dn\" (UniqueName: \"kubernetes.io/projected/6fe7d185-5cd3-4de1-a842-065620d27fdf-kube-api-access-gl7dn\") pod \"certified-operators-fbd6s\" (UID: \"6fe7d185-5cd3-4de1-a842-065620d27fdf\") " pod="openshift-marketplace/certified-operators-fbd6s" Sep 30 09:51:43 crc kubenswrapper[4730]: I0930 09:51:43.951167 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-9x2qr" podStartSLOduration=126.951129354 podStartE2EDuration="2m6.951129354s" podCreationTimestamp="2025-09-30 09:49:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 09:51:43.94702182 +0000 UTC m=+148.280281813" watchObservedRunningTime="2025-09-30 09:51:43.951129354 +0000 UTC m=+148.284389347" Sep 30 09:51:43 crc kubenswrapper[4730]: I0930 09:51:43.988386 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xdzzr\" (UID: \"3f90d05f-7820-4af0-8894-6d63dc672f33\") " pod="openshift-image-registry/image-registry-697d97f7c8-xdzzr" Sep 30 09:51:43 crc kubenswrapper[4730]: E0930 09:51:43.988725 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 09:51:44.488713427 +0000 UTC m=+148.821973410 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xdzzr" (UID: "3f90d05f-7820-4af0-8894-6d63dc672f33") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 09:51:44 crc kubenswrapper[4730]: I0930 09:51:44.086422 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-fbd6s" Sep 30 09:51:44 crc kubenswrapper[4730]: I0930 09:51:44.093473 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 09:51:44 crc kubenswrapper[4730]: E0930 09:51:44.093826 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 09:51:44.593809191 +0000 UTC m=+148.927069184 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 09:51:44 crc kubenswrapper[4730]: I0930 09:51:44.197754 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xdzzr\" (UID: \"3f90d05f-7820-4af0-8894-6d63dc672f33\") " pod="openshift-image-registry/image-registry-697d97f7c8-xdzzr" Sep 30 09:51:44 crc kubenswrapper[4730]: E0930 09:51:44.198461 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 09:51:44.698446964 +0000 UTC m=+149.031706957 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xdzzr" (UID: "3f90d05f-7820-4af0-8894-6d63dc672f33") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 09:51:44 crc kubenswrapper[4730]: I0930 09:51:44.307199 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 09:51:44 crc kubenswrapper[4730]: I0930 09:51:44.307483 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 30 09:51:44 crc kubenswrapper[4730]: I0930 09:51:44.307510 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 30 09:51:44 crc kubenswrapper[4730]: E0930 09:51:44.308103 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 09:51:44.808053464 +0000 UTC m=+149.141313507 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 09:51:44 crc kubenswrapper[4730]: I0930 09:51:44.314758 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 30 09:51:44 crc kubenswrapper[4730]: I0930 09:51:44.315404 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 30 09:51:44 crc kubenswrapper[4730]: I0930 09:51:44.408472 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 30 09:51:44 crc kubenswrapper[4730]: I0930 09:51:44.408529 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xdzzr\" (UID: \"3f90d05f-7820-4af0-8894-6d63dc672f33\") " pod="openshift-image-registry/image-registry-697d97f7c8-xdzzr"
Sep 30 09:51:44 crc kubenswrapper[4730]: I0930 09:51:44.408624 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 30 09:51:44 crc kubenswrapper[4730]: I0930 09:51:44.411862 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-cjsxd" event={"ID":"800a0b26-7e9b-458b-95a9-703c7ace905f","Type":"ContainerStarted","Data":"fa24e24975cef7fa1e1fe6113c5dc6040605b6c211b39e35647d6d7c7af85f8d"}
Sep 30 09:51:44 crc kubenswrapper[4730]: I0930 09:51:44.420276 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 30 09:51:44 crc kubenswrapper[4730]: I0930 09:51:44.420511 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 30 09:51:44 crc kubenswrapper[4730]: E0930 09:51:44.420814 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 09:51:44.920801901 +0000 UTC m=+149.254061894 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xdzzr" (UID: "3f90d05f-7820-4af0-8894-6d63dc672f33") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 09:51:44 crc kubenswrapper[4730]: I0930 09:51:44.430799 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 30 09:51:44 crc kubenswrapper[4730]: I0930 09:51:44.431856 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-r8pgh" event={"ID":"4658f792-b8d4-4b22-92b7-46f6c8944eba","Type":"ContainerStarted","Data":"457357c3914fc1753952cde4081cee450b72567dd948543230e5b781551e4808"}
Sep 30 09:51:44 crc kubenswrapper[4730]: I0930 09:51:44.432239 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-r8pgh"
Sep 30 09:51:44 crc kubenswrapper[4730]: I0930 09:51:44.439301 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 30 09:51:44 crc kubenswrapper[4730]: I0930 09:51:44.474126 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-cjsxd" podStartSLOduration=127.474102619 podStartE2EDuration="2m7.474102619s" podCreationTimestamp="2025-09-30 09:49:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 09:51:44.431547588 +0000 UTC m=+148.764807592" watchObservedRunningTime="2025-09-30 09:51:44.474102619 +0000 UTC m=+148.807362622"
Sep 30 09:51:44 crc kubenswrapper[4730]: I0930 09:51:44.476080 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-r8pgh" podStartSLOduration=9.476064763 podStartE2EDuration="9.476064763s" podCreationTimestamp="2025-09-30 09:51:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 09:51:44.473741329 +0000 UTC m=+148.807001332" watchObservedRunningTime="2025-09-30 09:51:44.476064763 +0000 UTC m=+148.809324766"
Sep 30 09:51:44 crc kubenswrapper[4730]: I0930 09:51:44.503004 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-tdl79" event={"ID":"503191a0-1fb3-4b1e-84f1-ac3d702f686e","Type":"ContainerStarted","Data":"089f477c400857cc65d27d626a65927537c5e6a98815c2d62b003142803e7692"}
Sep 30 09:51:44 crc kubenswrapper[4730]: I0930 09:51:44.515012 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 09:51:44 crc kubenswrapper[4730]: E0930 09:51:44.520961 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 09:51:45.020930158 +0000 UTC m=+149.354190191 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 09:51:44 crc kubenswrapper[4730]: I0930 09:51:44.522378 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-fnbq4"]
Sep 30 09:51:44 crc kubenswrapper[4730]: I0930 09:51:44.544905 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-tdl79" podStartSLOduration=127.544874742 podStartE2EDuration="2m7.544874742s" podCreationTimestamp="2025-09-30 09:49:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 09:51:44.543506934 +0000 UTC m=+148.876766927" watchObservedRunningTime="2025-09-30 09:51:44.544874742 +0000 UTC m=+148.878134735"
Sep 30 09:51:44 crc kubenswrapper[4730]: I0930 09:51:44.546842 4730 patch_prober.go:28] interesting pod/router-default-5444994796-m2sph container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Sep 30 09:51:44 crc kubenswrapper[4730]: [-]has-synced failed: reason withheld
Sep 30 09:51:44 crc kubenswrapper[4730]: [+]process-running ok
Sep 30 09:51:44 crc kubenswrapper[4730]: healthz check failed
Sep 30 09:51:44 crc kubenswrapper[4730]: I0930 09:51:44.546906 4730 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-m2sph" podUID="16c0279b-a0e3-4400-be1a-c485c8ea0a34" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Sep 30 09:51:44 crc kubenswrapper[4730]: I0930 09:51:44.583301 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-75wjk" event={"ID":"3970c1fd-c1a9-40a4-a2b5-276df544f222","Type":"ContainerStarted","Data":"50c8f80ece4fb659f6f520d4256206c4d12ecee54d5ce8adc51302ca4a3a7c30"}
Sep 30 09:51:44 crc kubenswrapper[4730]: I0930 09:51:44.584346 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-75wjk"
Sep 30 09:51:44 crc kubenswrapper[4730]: I0930 09:51:44.617951 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-75wjk" podStartSLOduration=127.617929638 podStartE2EDuration="2m7.617929638s" podCreationTimestamp="2025-09-30 09:49:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 09:51:44.615024667 +0000 UTC m=+148.948284660" watchObservedRunningTime="2025-09-30 09:51:44.617929638 +0000 UTC m=+148.951189631"
Sep 30 09:51:44 crc kubenswrapper[4730]: I0930 09:51:44.625297 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xdzzr\" (UID: \"3f90d05f-7820-4af0-8894-6d63dc672f33\") " pod="openshift-image-registry/image-registry-697d97f7c8-xdzzr"
Sep 30 09:51:44 crc kubenswrapper[4730]: E0930 09:51:44.626835 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 09:51:45.126822915 +0000 UTC m=+149.460082908 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xdzzr" (UID: "3f90d05f-7820-4af0-8894-6d63dc672f33") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 09:51:44 crc kubenswrapper[4730]: I0930 09:51:44.697110 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-jj6x6" event={"ID":"f1f19ecd-c701-4a85-85a4-05f74f6d6f60","Type":"ContainerStarted","Data":"48d48850f784d7a89dfd3e375edab0aa5c5b0c03bbcf7ae45ac9fdd97a8d66c0"}
Sep 30 09:51:44 crc kubenswrapper[4730]: I0930 09:51:44.703831 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-8qhvr"]
Sep 30 09:51:44 crc kubenswrapper[4730]: I0930 09:51:44.711945 4730 patch_prober.go:28] interesting pod/downloads-7954f5f757-gf4lh container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" start-of-body=
Sep 30 09:51:44 crc kubenswrapper[4730]: I0930 09:51:44.712001 4730 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-gf4lh" podUID="af4abbd6-d1b9-411d-9128-cc5b74a93eb5" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused"
Sep 30 09:51:44 crc kubenswrapper[4730]: I0930 09:51:44.720916 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-b829k"
Sep 30 09:51:44 crc kubenswrapper[4730]: I0930 09:51:44.728789 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-cffv2"
Sep 30 09:51:44 crc kubenswrapper[4730]: I0930 09:51:44.729720 4730 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-nt7nq container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.40:8080/healthz\": dial tcp 10.217.0.40:8080: connect: connection refused" start-of-body=
Sep 30 09:51:44 crc kubenswrapper[4730]: I0930 09:51:44.729792 4730 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-nt7nq" podUID="b437c48c-f825-4a51-9076-acf5dcd25e36" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.40:8080/healthz\": dial tcp 10.217.0.40:8080: connect: connection refused"
Sep 30 09:51:44 crc kubenswrapper[4730]: I0930 09:51:44.735079 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 30 09:51:44 crc kubenswrapper[4730]: I0930 09:51:44.758905 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 09:51:44 crc kubenswrapper[4730]: E0930 09:51:44.765468 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 09:51:45.265425769 +0000 UTC m=+149.598685762 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 09:51:44 crc kubenswrapper[4730]: I0930 09:51:44.765770 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xdzzr\" (UID: \"3f90d05f-7820-4af0-8894-6d63dc672f33\") " pod="openshift-image-registry/image-registry-697d97f7c8-xdzzr"
Sep 30 09:51:44 crc kubenswrapper[4730]: E0930 09:51:44.797812 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 09:51:45.297791347 +0000 UTC m=+149.631051340 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xdzzr" (UID: "3f90d05f-7820-4af0-8894-6d63dc672f33") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 09:51:44 crc kubenswrapper[4730]: I0930 09:51:44.809084 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-v67gm"]
Sep 30 09:51:44 crc kubenswrapper[4730]: I0930 09:51:44.875350 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 09:51:44 crc kubenswrapper[4730]: E0930 09:51:44.879107 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 09:51:45.379089552 +0000 UTC m=+149.712349545 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 09:51:44 crc kubenswrapper[4730]: I0930 09:51:44.990287 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xdzzr\" (UID: \"3f90d05f-7820-4af0-8894-6d63dc672f33\") " pod="openshift-image-registry/image-registry-697d97f7c8-xdzzr"
Sep 30 09:51:44 crc kubenswrapper[4730]: E0930 09:51:44.990877 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 09:51:45.490862382 +0000 UTC m=+149.824122375 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xdzzr" (UID: "3f90d05f-7820-4af0-8894-6d63dc672f33") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 09:51:45 crc kubenswrapper[4730]: I0930 09:51:45.075405 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-fbd6s"]
Sep 30 09:51:45 crc kubenswrapper[4730]: I0930 09:51:45.092051 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 09:51:45 crc kubenswrapper[4730]: E0930 09:51:45.092378 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 09:51:45.592358826 +0000 UTC m=+149.925618819 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 09:51:45 crc kubenswrapper[4730]: I0930 09:51:45.194249 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xdzzr\" (UID: \"3f90d05f-7820-4af0-8894-6d63dc672f33\") " pod="openshift-image-registry/image-registry-697d97f7c8-xdzzr"
Sep 30 09:51:45 crc kubenswrapper[4730]: E0930 09:51:45.195057 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 09:51:45.695020074 +0000 UTC m=+150.028280077 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xdzzr" (UID: "3f90d05f-7820-4af0-8894-6d63dc672f33") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 09:51:45 crc kubenswrapper[4730]: I0930 09:51:45.303875 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 09:51:45 crc kubenswrapper[4730]: E0930 09:51:45.304538 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 09:51:45.804513831 +0000 UTC m=+150.137773824 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 09:51:45 crc kubenswrapper[4730]: I0930 09:51:45.332928 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-8srlp"]
Sep 30 09:51:45 crc kubenswrapper[4730]: I0930 09:51:45.334211 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8srlp"
Sep 30 09:51:45 crc kubenswrapper[4730]: I0930 09:51:45.340103 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb"
Sep 30 09:51:45 crc kubenswrapper[4730]: I0930 09:51:45.361254 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-8srlp"]
Sep 30 09:51:45 crc kubenswrapper[4730]: I0930 09:51:45.407985 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xdzzr\" (UID: \"3f90d05f-7820-4af0-8894-6d63dc672f33\") " pod="openshift-image-registry/image-registry-697d97f7c8-xdzzr"
Sep 30 09:51:45 crc kubenswrapper[4730]: E0930 09:51:45.408666 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 09:51:45.908651619 +0000 UTC m=+150.241911602 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xdzzr" (UID: "3f90d05f-7820-4af0-8894-6d63dc672f33") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 09:51:45 crc kubenswrapper[4730]: I0930 09:51:45.410687 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/297e4d9f-18c2-4e11-a89b-11df93cba4ef-catalog-content\") pod \"redhat-marketplace-8srlp\" (UID: \"297e4d9f-18c2-4e11-a89b-11df93cba4ef\") " pod="openshift-marketplace/redhat-marketplace-8srlp"
Sep 30 09:51:45 crc kubenswrapper[4730]: I0930 09:51:45.410819 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vsmsb\" (UniqueName: \"kubernetes.io/projected/297e4d9f-18c2-4e11-a89b-11df93cba4ef-kube-api-access-vsmsb\") pod \"redhat-marketplace-8srlp\" (UID: \"297e4d9f-18c2-4e11-a89b-11df93cba4ef\") " pod="openshift-marketplace/redhat-marketplace-8srlp"
Sep 30 09:51:45 crc kubenswrapper[4730]: I0930 09:51:45.411186 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/297e4d9f-18c2-4e11-a89b-11df93cba4ef-utilities\") pod \"redhat-marketplace-8srlp\" (UID: \"297e4d9f-18c2-4e11-a89b-11df93cba4ef\") " pod="openshift-marketplace/redhat-marketplace-8srlp"
Sep 30 09:51:45 crc kubenswrapper[4730]: W0930 09:51:45.472076 4730 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3b6479f0_333b_4a96_9adf_2099afdc2447.slice/crio-aa35f143aebb4eb3de4ddcc22c61ddf4363a253c55ac4fa89edefdba8748304f WatchSource:0}: Error finding container aa35f143aebb4eb3de4ddcc22c61ddf4363a253c55ac4fa89edefdba8748304f: Status 404 returned error can't find the container with id aa35f143aebb4eb3de4ddcc22c61ddf4363a253c55ac4fa89edefdba8748304f
Sep 30 09:51:45 crc kubenswrapper[4730]: I0930 09:51:45.512067 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 09:51:45 crc kubenswrapper[4730]: I0930 09:51:45.512347 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/297e4d9f-18c2-4e11-a89b-11df93cba4ef-catalog-content\") pod \"redhat-marketplace-8srlp\" (UID: \"297e4d9f-18c2-4e11-a89b-11df93cba4ef\") " pod="openshift-marketplace/redhat-marketplace-8srlp"
Sep 30 09:51:45 crc kubenswrapper[4730]: I0930 09:51:45.512369 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vsmsb\" (UniqueName: \"kubernetes.io/projected/297e4d9f-18c2-4e11-a89b-11df93cba4ef-kube-api-access-vsmsb\") pod \"redhat-marketplace-8srlp\" (UID: \"297e4d9f-18c2-4e11-a89b-11df93cba4ef\") " pod="openshift-marketplace/redhat-marketplace-8srlp"
Sep 30 09:51:45 crc kubenswrapper[4730]: I0930 09:51:45.512408 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/297e4d9f-18c2-4e11-a89b-11df93cba4ef-utilities\") pod \"redhat-marketplace-8srlp\" (UID: \"297e4d9f-18c2-4e11-a89b-11df93cba4ef\") " pod="openshift-marketplace/redhat-marketplace-8srlp"
Sep 30 09:51:45 crc kubenswrapper[4730]: I0930 09:51:45.513397 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/297e4d9f-18c2-4e11-a89b-11df93cba4ef-catalog-content\") pod \"redhat-marketplace-8srlp\" (UID: \"297e4d9f-18c2-4e11-a89b-11df93cba4ef\") " pod="openshift-marketplace/redhat-marketplace-8srlp"
Sep 30 09:51:45 crc kubenswrapper[4730]: E0930 09:51:45.513559 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 09:51:46.013537648 +0000 UTC m=+150.346797641 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 09:51:45 crc kubenswrapper[4730]: I0930 09:51:45.522024 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/297e4d9f-18c2-4e11-a89b-11df93cba4ef-utilities\") pod \"redhat-marketplace-8srlp\" (UID: \"297e4d9f-18c2-4e11-a89b-11df93cba4ef\") " pod="openshift-marketplace/redhat-marketplace-8srlp"
Sep 30 09:51:45 crc kubenswrapper[4730]: I0930 09:51:45.542280 4730 patch_prober.go:28] interesting pod/router-default-5444994796-m2sph container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Sep 30 09:51:45 crc kubenswrapper[4730]: [-]has-synced failed: reason withheld
Sep 30 09:51:45 crc kubenswrapper[4730]: [+]process-running ok
Sep 30 09:51:45 crc kubenswrapper[4730]: healthz check failed
Sep 30 09:51:45 crc kubenswrapper[4730]: I0930 09:51:45.542354 4730 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-m2sph" podUID="16c0279b-a0e3-4400-be1a-c485c8ea0a34" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Sep 30 09:51:45 crc kubenswrapper[4730]: I0930 09:51:45.544989 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vsmsb\" (UniqueName: \"kubernetes.io/projected/297e4d9f-18c2-4e11-a89b-11df93cba4ef-kube-api-access-vsmsb\") pod \"redhat-marketplace-8srlp\" (UID: \"297e4d9f-18c2-4e11-a89b-11df93cba4ef\") " pod="openshift-marketplace/redhat-marketplace-8srlp"
Sep 30 09:51:45 crc kubenswrapper[4730]: I0930 09:51:45.613462 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xdzzr\" (UID: \"3f90d05f-7820-4af0-8894-6d63dc672f33\") " pod="openshift-image-registry/image-registry-697d97f7c8-xdzzr"
Sep 30 09:51:45 crc kubenswrapper[4730]: E0930 09:51:45.614022 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 09:51:46.113994674 +0000 UTC m=+150.447254667 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xdzzr" (UID: "3f90d05f-7820-4af0-8894-6d63dc672f33") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 09:51:45 crc kubenswrapper[4730]: I0930 09:51:45.714807 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 09:51:45 crc kubenswrapper[4730]: E0930 09:51:45.715397 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 09:51:46.215383366 +0000 UTC m=+150.548643359 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 09:51:45 crc kubenswrapper[4730]: I0930 09:51:45.732272 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-zsmr9"]
Sep 30 09:51:45 crc kubenswrapper[4730]: I0930 09:51:45.735010 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-zsmr9"
Sep 30 09:51:45 crc kubenswrapper[4730]: I0930 09:51:45.735448 4730 generic.go:334] "Generic (PLEG): container finished" podID="30fe799a-bbf1-4e62-8187-353da7ca9930" containerID="ff3d5848c16585363eb437317caaaa90cfcbd997cb68b0c1f33a8d0a61419652" exitCode=0
Sep 30 09:51:45 crc kubenswrapper[4730]: I0930 09:51:45.735533 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-v67gm" event={"ID":"30fe799a-bbf1-4e62-8187-353da7ca9930","Type":"ContainerDied","Data":"ff3d5848c16585363eb437317caaaa90cfcbd997cb68b0c1f33a8d0a61419652"}
Sep 30 09:51:45 crc kubenswrapper[4730]: I0930 09:51:45.735563 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-v67gm" event={"ID":"30fe799a-bbf1-4e62-8187-353da7ca9930","Type":"ContainerStarted","Data":"121b681cbb69bb5363a0405cfc095365c6bc659202242b2c3eb65d7ce453c42d"}
Sep 30 09:51:45 crc kubenswrapper[4730]: I0930 09:51:45.739078 4730 generic.go:334] "Generic (PLEG): container finished" podID="9ea0a817-d369-46fc-9e35-ab227abcbf25" containerID="776418ea5720644edcbda23ad765d15fecd354331babf1927c2b8d2076015452" exitCode=0
Sep 30 09:51:45 crc kubenswrapper[4730]: I0930 09:51:45.739132 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fnbq4" event={"ID":"9ea0a817-d369-46fc-9e35-ab227abcbf25","Type":"ContainerDied","Data":"776418ea5720644edcbda23ad765d15fecd354331babf1927c2b8d2076015452"}
Sep 30 09:51:45 crc kubenswrapper[4730]: I0930 09:51:45.739156 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fnbq4" event={"ID":"9ea0a817-d369-46fc-9e35-ab227abcbf25","Type":"ContainerStarted","Data":"55a2e0adc3faa400b8585bf18bebd20608c22943fd3f66fc704ced8189781262"}
Sep 30 09:51:45 crc kubenswrapper[4730]: I0930 09:51:45.741229 4730 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Sep 30 09:51:45 crc kubenswrapper[4730]: I0930 09:51:45.749039 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-zsmr9"]
Sep 30 09:51:45 crc kubenswrapper[4730]: I0930 09:51:45.762005 4730 generic.go:334] "Generic (PLEG): container finished" podID="cd78d8d1-8617-4c3d-9205-abd8bbdde710" containerID="6c116a99bd8ab138907566ca989fb722795edd4380180a47ae5b42370c0593c6" exitCode=0
Sep 30 09:51:45 crc kubenswrapper[4730]: I0930 09:51:45.762100 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8qhvr" event={"ID":"cd78d8d1-8617-4c3d-9205-abd8bbdde710","Type":"ContainerDied","Data":"6c116a99bd8ab138907566ca989fb722795edd4380180a47ae5b42370c0593c6"}
Sep 30 09:51:45 crc kubenswrapper[4730]: I0930 09:51:45.762162 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8qhvr" event={"ID":"cd78d8d1-8617-4c3d-9205-abd8bbdde710","Type":"ContainerStarted","Data":"172d388c58b194e9773a4bcb28a549a1b5f2baa00a80bb6a7ce08a08df4a7ca1"}
Sep 30 09:51:45 crc kubenswrapper[4730]: I0930 09:51:45.771533 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-jj6x6" event={"ID":"f1f19ecd-c701-4a85-85a4-05f74f6d6f60","Type":"ContainerStarted","Data":"d9c685118174b5ee7a0c560ce56fe531064e2fdd5b27f6597dd598ce4d7d4819"}
Sep 30 09:51:45 crc kubenswrapper[4730]: I0930 09:51:45.782441 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"21af92ebfcba62d754bea689587ff296e958db0a0b241f6f67c243f58da6a5c4"}
Sep 30 09:51:45 crc kubenswrapper[4730]: I0930 09:51:45.782487 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"2f672749be944ac7aecd45b0e0bc01e2e8dacfc86f435d07a61bc5d9ac1414bf"}
Sep 30 09:51:45 crc kubenswrapper[4730]: I0930 09:51:45.801885 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8srlp"
Sep 30 09:51:45 crc kubenswrapper[4730]: I0930 09:51:45.807034 4730 generic.go:334] "Generic (PLEG): container finished" podID="6fe7d185-5cd3-4de1-a842-065620d27fdf" containerID="c7192a5752bbec093162bf63e7301f5c41386e30115e40bfe58dafe0969d70b2" exitCode=0
Sep 30 09:51:45 crc kubenswrapper[4730]: I0930 09:51:45.807126 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fbd6s" event={"ID":"6fe7d185-5cd3-4de1-a842-065620d27fdf","Type":"ContainerDied","Data":"c7192a5752bbec093162bf63e7301f5c41386e30115e40bfe58dafe0969d70b2"}
Sep 30 09:51:45 crc kubenswrapper[4730]: I0930 09:51:45.807173 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fbd6s" event={"ID":"6fe7d185-5cd3-4de1-a842-065620d27fdf","Type":"ContainerStarted","Data":"caf4527daaf45ed1b9844bb9c959a04ded47e26b11052061b104e0f0732f4606"}
Sep 30 09:51:45 crc kubenswrapper[4730]: I0930 09:51:45.814310 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"1aca1ccc2c914c8d450acf264c419645f7f4222d9c2c8229ebecf2013666da11"}
Sep 30 09:51:45 crc kubenswrapper[4730]: I0930 09:51:45.817741 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b6d72e26-75bd-4095-aae3-345a816de053-catalog-content\") pod \"redhat-marketplace-zsmr9\" (UID: \"b6d72e26-75bd-4095-aae3-345a816de053\") " pod="openshift-marketplace/redhat-marketplace-zsmr9"
Sep 30 09:51:45 crc kubenswrapper[4730]: I0930 09:51:45.817829 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xdzzr\" (UID: \"3f90d05f-7820-4af0-8894-6d63dc672f33\") " pod="openshift-image-registry/image-registry-697d97f7c8-xdzzr"
Sep 30 09:51:45 crc kubenswrapper[4730]: I0930 09:51:45.817935 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lv4g8\" (UniqueName: \"kubernetes.io/projected/b6d72e26-75bd-4095-aae3-345a816de053-kube-api-access-lv4g8\") pod \"redhat-marketplace-zsmr9\" (UID: \"b6d72e26-75bd-4095-aae3-345a816de053\") " pod="openshift-marketplace/redhat-marketplace-zsmr9"
Sep 30 09:51:45 crc kubenswrapper[4730]: I0930 09:51:45.817984 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b6d72e26-75bd-4095-aae3-345a816de053-utilities\") pod \"redhat-marketplace-zsmr9\" (UID: \"b6d72e26-75bd-4095-aae3-345a816de053\") " pod="openshift-marketplace/redhat-marketplace-zsmr9"
Sep 30 09:51:45 crc kubenswrapper[4730]: E0930 09:51:45.819243 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 09:51:46.319212915 +0000 UTC m=+150.652472908 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xdzzr" (UID: "3f90d05f-7820-4af0-8894-6d63dc672f33") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 09:51:45 crc kubenswrapper[4730]: I0930 09:51:45.823781 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"aa35f143aebb4eb3de4ddcc22c61ddf4363a253c55ac4fa89edefdba8748304f"}
Sep 30 09:51:45 crc kubenswrapper[4730]: I0930 09:51:45.824026 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 30 09:51:45 crc kubenswrapper[4730]: I0930 09:51:45.825507 4730 patch_prober.go:28] interesting pod/downloads-7954f5f757-gf4lh container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" start-of-body=
Sep 30 09:51:45 crc kubenswrapper[4730]: I0930 09:51:45.825580 4730 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-gf4lh" podUID="af4abbd6-d1b9-411d-9128-cc5b74a93eb5" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused"
Sep 30 09:51:45 crc kubenswrapper[4730]: I0930 09:51:45.831496 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-nt7nq"
Sep 30 09:51:45 crc kubenswrapper[4730]: I0930 09:51:45.924221 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 09:51:45 crc kubenswrapper[4730]: E0930 09:51:45.924826 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 09:51:46.424793504 +0000 UTC m=+150.758053497 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 09:51:45 crc kubenswrapper[4730]: I0930 09:51:45.925276 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xdzzr\" (UID: \"3f90d05f-7820-4af0-8894-6d63dc672f33\") " pod="openshift-image-registry/image-registry-697d97f7c8-xdzzr"
Sep 30 09:51:45 crc kubenswrapper[4730]: I0930 09:51:45.925571 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lv4g8\" (UniqueName: \"kubernetes.io/projected/b6d72e26-75bd-4095-aae3-345a816de053-kube-api-access-lv4g8\") pod \"redhat-marketplace-zsmr9\" (UID: \"b6d72e26-75bd-4095-aae3-345a816de053\") " pod="openshift-marketplace/redhat-marketplace-zsmr9"
Sep 30 09:51:45 crc kubenswrapper[4730]: I0930 09:51:45.925715 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b6d72e26-75bd-4095-aae3-345a816de053-utilities\") pod \"redhat-marketplace-zsmr9\" (UID: \"b6d72e26-75bd-4095-aae3-345a816de053\") " pod="openshift-marketplace/redhat-marketplace-zsmr9"
Sep 30 09:51:45 crc kubenswrapper[4730]: I0930 09:51:45.925927 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b6d72e26-75bd-4095-aae3-345a816de053-catalog-content\") pod \"redhat-marketplace-zsmr9\" (UID: \"b6d72e26-75bd-4095-aae3-345a816de053\") " pod="openshift-marketplace/redhat-marketplace-zsmr9"
Sep 30 09:51:45 crc kubenswrapper[4730]: E0930 09:51:45.927942 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 09:51:46.427922521 +0000 UTC m=+150.761182514 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xdzzr" (UID: "3f90d05f-7820-4af0-8894-6d63dc672f33") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 09:51:45 crc kubenswrapper[4730]: I0930 09:51:45.929715 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b6d72e26-75bd-4095-aae3-345a816de053-utilities\") pod \"redhat-marketplace-zsmr9\" (UID: \"b6d72e26-75bd-4095-aae3-345a816de053\") " pod="openshift-marketplace/redhat-marketplace-zsmr9"
Sep 30 09:51:45 crc kubenswrapper[4730]: I0930 09:51:45.935670 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b6d72e26-75bd-4095-aae3-345a816de053-catalog-content\") pod \"redhat-marketplace-zsmr9\" (UID: \"b6d72e26-75bd-4095-aae3-345a816de053\") " pod="openshift-marketplace/redhat-marketplace-zsmr9"
Sep 30 09:51:45 crc kubenswrapper[4730]: I0930 09:51:45.994937 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lv4g8\" (UniqueName: \"kubernetes.io/projected/b6d72e26-75bd-4095-aae3-345a816de053-kube-api-access-lv4g8\") pod \"redhat-marketplace-zsmr9\" (UID: \"b6d72e26-75bd-4095-aae3-345a816de053\") " pod="openshift-marketplace/redhat-marketplace-zsmr9"
Sep 30 09:51:46 crc kubenswrapper[4730]: I0930 09:51:46.030567 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 09:51:46 crc kubenswrapper[4730]: E0930 09:51:46.031114 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 09:51:46.531090733 +0000 UTC m=+150.864350726 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 09:51:46 crc kubenswrapper[4730]: I0930 09:51:46.096025 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-zsmr9"
Sep 30 09:51:46 crc kubenswrapper[4730]: I0930 09:51:46.133254 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xdzzr\" (UID: \"3f90d05f-7820-4af0-8894-6d63dc672f33\") " pod="openshift-image-registry/image-registry-697d97f7c8-xdzzr"
Sep 30 09:51:46 crc kubenswrapper[4730]: E0930 09:51:46.134182 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 09:51:46.634163951 +0000 UTC m=+150.967423944 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xdzzr" (UID: "3f90d05f-7820-4af0-8894-6d63dc672f33") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 09:51:46 crc kubenswrapper[4730]: I0930 09:51:46.236663 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 09:51:46 crc kubenswrapper[4730]: E0930 09:51:46.236862 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 09:51:46.736830789 +0000 UTC m=+151.070090792 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 09:51:46 crc kubenswrapper[4730]: I0930 09:51:46.237090 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xdzzr\" (UID: \"3f90d05f-7820-4af0-8894-6d63dc672f33\") " pod="openshift-image-registry/image-registry-697d97f7c8-xdzzr"
Sep 30 09:51:46 crc kubenswrapper[4730]: E0930 09:51:46.237476 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 09:51:46.737464996 +0000 UTC m=+151.070724989 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xdzzr" (UID: "3f90d05f-7820-4af0-8894-6d63dc672f33") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 09:51:46 crc kubenswrapper[4730]: I0930 09:51:46.239166 4730 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock"
Sep 30 09:51:46 crc kubenswrapper[4730]: I0930 09:51:46.309142 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-5g8hq"]
Sep 30 09:51:46 crc kubenswrapper[4730]: I0930 09:51:46.310102 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-5g8hq"
Sep 30 09:51:46 crc kubenswrapper[4730]: I0930 09:51:46.312761 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh"
Sep 30 09:51:46 crc kubenswrapper[4730]: I0930 09:51:46.322228 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-5g8hq"]
Sep 30 09:51:46 crc kubenswrapper[4730]: I0930 09:51:46.332344 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-8srlp"]
Sep 30 09:51:46 crc kubenswrapper[4730]: I0930 09:51:46.338234 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 09:51:46 crc kubenswrapper[4730]: E0930 09:51:46.338507 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 09:51:46.838490588 +0000 UTC m=+151.171750581 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 09:51:46 crc kubenswrapper[4730]: W0930 09:51:46.346385 4730 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod297e4d9f_18c2_4e11_a89b_11df93cba4ef.slice/crio-9bcabb30039f75d4670167f4ca1a270216e2ba169fce17eaac84854f8118f33f WatchSource:0}: Error finding container 9bcabb30039f75d4670167f4ca1a270216e2ba169fce17eaac84854f8118f33f: Status 404 returned error can't find the container with id 9bcabb30039f75d4670167f4ca1a270216e2ba169fce17eaac84854f8118f33f
Sep 30 09:51:46 crc kubenswrapper[4730]: W0930 09:51:46.438421 4730 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb6d72e26_75bd_4095_aae3_345a816de053.slice/crio-49f0e8db32ba925ba6d847e5c36be1bf05bb171caa8ef6dda7eaacad1f750081 WatchSource:0}: Error finding container 49f0e8db32ba925ba6d847e5c36be1bf05bb171caa8ef6dda7eaacad1f750081: Status 404 returned error can't find the container with id 49f0e8db32ba925ba6d847e5c36be1bf05bb171caa8ef6dda7eaacad1f750081
Sep 30 09:51:46 crc kubenswrapper[4730]: I0930 09:51:46.439355 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2bllk\" (UniqueName: \"kubernetes.io/projected/57333b74-aac2-4b1b-af18-e4dece554edb-kube-api-access-2bllk\") pod \"redhat-operators-5g8hq\" (UID: \"57333b74-aac2-4b1b-af18-e4dece554edb\") " pod="openshift-marketplace/redhat-operators-5g8hq"
Sep 30 09:51:46 crc kubenswrapper[4730]: I0930 09:51:46.439414 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57333b74-aac2-4b1b-af18-e4dece554edb-utilities\") pod \"redhat-operators-5g8hq\" (UID: \"57333b74-aac2-4b1b-af18-e4dece554edb\") " pod="openshift-marketplace/redhat-operators-5g8hq"
Sep 30 09:51:46 crc kubenswrapper[4730]: I0930 09:51:46.439451 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xdzzr\" (UID: \"3f90d05f-7820-4af0-8894-6d63dc672f33\") " pod="openshift-image-registry/image-registry-697d97f7c8-xdzzr"
Sep 30 09:51:46 crc kubenswrapper[4730]: I0930 09:51:46.439509 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57333b74-aac2-4b1b-af18-e4dece554edb-catalog-content\") pod \"redhat-operators-5g8hq\" (UID: \"57333b74-aac2-4b1b-af18-e4dece554edb\") " pod="openshift-marketplace/redhat-operators-5g8hq"
Sep 30 09:51:46 crc kubenswrapper[4730]: E0930 09:51:46.441004 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 09:51:46.94099033 +0000 UTC m=+151.274250333 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xdzzr" (UID: "3f90d05f-7820-4af0-8894-6d63dc672f33") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 09:51:46 crc kubenswrapper[4730]: I0930 09:51:46.449836 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-zsmr9"]
Sep 30 09:51:46 crc kubenswrapper[4730]: I0930 09:51:46.533446 4730 patch_prober.go:28] interesting pod/router-default-5444994796-m2sph container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Sep 30 09:51:46 crc kubenswrapper[4730]: [-]has-synced failed: reason withheld
Sep 30 09:51:46 crc kubenswrapper[4730]: [+]process-running ok
Sep 30 09:51:46 crc kubenswrapper[4730]: healthz check failed
Sep 30 09:51:46 crc kubenswrapper[4730]: I0930 09:51:46.533500 4730 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-m2sph" podUID="16c0279b-a0e3-4400-be1a-c485c8ea0a34" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Sep 30 09:51:46 crc kubenswrapper[4730]: I0930 09:51:46.541685 4730 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2025-09-30T09:51:46.239186853Z","Handler":null,"Name":""}
Sep 30 09:51:46 crc kubenswrapper[4730]: I0930 09:51:46.544791 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 09:51:46 crc kubenswrapper[4730]: I0930 09:51:46.545013 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57333b74-aac2-4b1b-af18-e4dece554edb-catalog-content\") pod \"redhat-operators-5g8hq\" (UID: \"57333b74-aac2-4b1b-af18-e4dece554edb\") " pod="openshift-marketplace/redhat-operators-5g8hq"
Sep 30 09:51:46 crc kubenswrapper[4730]: E0930 09:51:46.545091 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 09:51:47.045071728 +0000 UTC m=+151.378331721 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 09:51:46 crc kubenswrapper[4730]: I0930 09:51:46.545150 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2bllk\" (UniqueName: \"kubernetes.io/projected/57333b74-aac2-4b1b-af18-e4dece554edb-kube-api-access-2bllk\") pod \"redhat-operators-5g8hq\" (UID: \"57333b74-aac2-4b1b-af18-e4dece554edb\") " pod="openshift-marketplace/redhat-operators-5g8hq"
Sep 30 09:51:46 crc kubenswrapper[4730]: I0930 09:51:46.545192 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57333b74-aac2-4b1b-af18-e4dece554edb-utilities\") pod \"redhat-operators-5g8hq\" (UID: \"57333b74-aac2-4b1b-af18-e4dece554edb\") " pod="openshift-marketplace/redhat-operators-5g8hq"
Sep 30 09:51:46 crc kubenswrapper[4730]: I0930 09:51:46.545527 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57333b74-aac2-4b1b-af18-e4dece554edb-catalog-content\") pod \"redhat-operators-5g8hq\" (UID: \"57333b74-aac2-4b1b-af18-e4dece554edb\") " pod="openshift-marketplace/redhat-operators-5g8hq"
Sep 30 09:51:46 crc kubenswrapper[4730]: I0930 09:51:46.545740 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57333b74-aac2-4b1b-af18-e4dece554edb-utilities\") pod \"redhat-operators-5g8hq\" (UID: \"57333b74-aac2-4b1b-af18-e4dece554edb\") " pod="openshift-marketplace/redhat-operators-5g8hq"
Sep 30 09:51:46 crc kubenswrapper[4730]: I0930 09:51:46.548410 4730 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0
Sep 30 09:51:46 crc kubenswrapper[4730]: I0930 09:51:46.548456 4730 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock
Sep 30 09:51:46 crc kubenswrapper[4730]: I0930 09:51:46.581939 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2bllk\" (UniqueName: \"kubernetes.io/projected/57333b74-aac2-4b1b-af18-e4dece554edb-kube-api-access-2bllk\") pod \"redhat-operators-5g8hq\" (UID: \"57333b74-aac2-4b1b-af18-e4dece554edb\") " pod="openshift-marketplace/redhat-operators-5g8hq"
Sep 30 09:51:46 crc kubenswrapper[4730]: I0930 09:51:46.637137 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-5g8hq"
Sep 30 09:51:46 crc kubenswrapper[4730]: I0930 09:51:46.647032 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xdzzr\" (UID: \"3f90d05f-7820-4af0-8894-6d63dc672f33\") " pod="openshift-image-registry/image-registry-697d97f7c8-xdzzr"
Sep 30 09:51:46 crc kubenswrapper[4730]: I0930 09:51:46.649443 4730 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Sep 30 09:51:46 crc kubenswrapper[4730]: I0930 09:51:46.649485 4730 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xdzzr\" (UID: \"3f90d05f-7820-4af0-8894-6d63dc672f33\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-xdzzr"
Sep 30 09:51:46 crc kubenswrapper[4730]: I0930 09:51:46.679694 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xdzzr\" (UID: \"3f90d05f-7820-4af0-8894-6d63dc672f33\") " pod="openshift-image-registry/image-registry-697d97f7c8-xdzzr"
Sep 30 09:51:46 crc kubenswrapper[4730]: I0930 09:51:46.700831 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-xdzzr"
Sep 30 09:51:46 crc kubenswrapper[4730]: I0930 09:51:46.707161 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-drr8f"]
Sep 30 09:51:46 crc kubenswrapper[4730]: I0930 09:51:46.708658 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-drr8f"
Sep 30 09:51:46 crc kubenswrapper[4730]: I0930 09:51:46.717465 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-drr8f"]
Sep 30 09:51:46 crc kubenswrapper[4730]: I0930 09:51:46.748544 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 09:51:46 crc kubenswrapper[4730]: I0930 09:51:46.763196 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8".
PluginName "kubernetes.io/csi", VolumeGidValue "" Sep 30 09:51:46 crc kubenswrapper[4730]: I0930 09:51:46.851538 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57751804-fe40-4d95-a5b2-529037d3ba17-catalog-content\") pod \"redhat-operators-drr8f\" (UID: \"57751804-fe40-4d95-a5b2-529037d3ba17\") " pod="openshift-marketplace/redhat-operators-drr8f" Sep 30 09:51:46 crc kubenswrapper[4730]: I0930 09:51:46.851667 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lvwfp\" (UniqueName: \"kubernetes.io/projected/57751804-fe40-4d95-a5b2-529037d3ba17-kube-api-access-lvwfp\") pod \"redhat-operators-drr8f\" (UID: \"57751804-fe40-4d95-a5b2-529037d3ba17\") " pod="openshift-marketplace/redhat-operators-drr8f" Sep 30 09:51:46 crc kubenswrapper[4730]: I0930 09:51:46.851860 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57751804-fe40-4d95-a5b2-529037d3ba17-utilities\") pod \"redhat-operators-drr8f\" (UID: \"57751804-fe40-4d95-a5b2-529037d3ba17\") " pod="openshift-marketplace/redhat-operators-drr8f" Sep 30 09:51:46 crc kubenswrapper[4730]: I0930 09:51:46.878223 4730 generic.go:334] "Generic (PLEG): container finished" podID="b6d72e26-75bd-4095-aae3-345a816de053" containerID="917c2735a5940773083e97852550f4198cf6df8bae127c4849cfb8c9c0cdde7c" exitCode=0 Sep 30 09:51:46 crc kubenswrapper[4730]: I0930 09:51:46.878330 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zsmr9" event={"ID":"b6d72e26-75bd-4095-aae3-345a816de053","Type":"ContainerDied","Data":"917c2735a5940773083e97852550f4198cf6df8bae127c4849cfb8c9c0cdde7c"} Sep 30 09:51:46 crc kubenswrapper[4730]: I0930 09:51:46.878548 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zsmr9" event={"ID":"b6d72e26-75bd-4095-aae3-345a816de053","Type":"ContainerStarted","Data":"49f0e8db32ba925ba6d847e5c36be1bf05bb171caa8ef6dda7eaacad1f750081"} Sep 30 09:51:46 crc kubenswrapper[4730]: I0930 09:51:46.901269 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"f2cadb52c8d7106e81879d784f9fe3cd5a313a392cfb01afd50853b91604e119"} Sep 30 09:51:46 crc kubenswrapper[4730]: I0930 09:51:46.903784 4730 generic.go:334] "Generic (PLEG): container finished" podID="297e4d9f-18c2-4e11-a89b-11df93cba4ef" containerID="07663d8f06a233839c7a81e64fc3b252e93dce3596ef18b41c0d0b263ffe348a" exitCode=0 Sep 30 09:51:46 crc kubenswrapper[4730]: I0930 09:51:46.903886 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8srlp" event={"ID":"297e4d9f-18c2-4e11-a89b-11df93cba4ef","Type":"ContainerDied","Data":"07663d8f06a233839c7a81e64fc3b252e93dce3596ef18b41c0d0b263ffe348a"} Sep 30 09:51:46 crc kubenswrapper[4730]: I0930 09:51:46.903927 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8srlp" event={"ID":"297e4d9f-18c2-4e11-a89b-11df93cba4ef","Type":"ContainerStarted","Data":"9bcabb30039f75d4670167f4ca1a270216e2ba169fce17eaac84854f8118f33f"} Sep 30 09:51:46 crc kubenswrapper[4730]: I0930 09:51:46.905722 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"12d904afd2e323ca4460e824ef6c3efbe75ead55d00b34f8ba4e88c300d3da62"} Sep 30 09:51:46 crc kubenswrapper[4730]: I0930 09:51:46.911416 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-jj6x6" event={"ID":"f1f19ecd-c701-4a85-85a4-05f74f6d6f60","Type":"ContainerStarted","Data":"fe4c762fc3bc88b1f9e3c7effa893a883d25368479feee931f53aa6d210317ca"} Sep 30 09:51:46 crc kubenswrapper[4730]: I0930 09:51:46.911795 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-jj6x6" event={"ID":"f1f19ecd-c701-4a85-85a4-05f74f6d6f60","Type":"ContainerStarted","Data":"42e750551e18bcd96f2ed0b637d4d3c774e8b3a04f037bd3584d398ace0da627"} Sep 30 09:51:46 crc kubenswrapper[4730]: I0930 09:51:46.917585 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-75wjk" Sep 30 09:51:46 crc kubenswrapper[4730]: I0930 09:51:46.918532 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-5g8hq"] Sep 30 09:51:46 crc kubenswrapper[4730]: I0930 09:51:46.953280 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57751804-fe40-4d95-a5b2-529037d3ba17-utilities\") pod \"redhat-operators-drr8f\" (UID: \"57751804-fe40-4d95-a5b2-529037d3ba17\") " pod="openshift-marketplace/redhat-operators-drr8f" Sep 30 09:51:46 crc kubenswrapper[4730]: I0930 09:51:46.953357 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57751804-fe40-4d95-a5b2-529037d3ba17-catalog-content\") pod \"redhat-operators-drr8f\" (UID: \"57751804-fe40-4d95-a5b2-529037d3ba17\") " pod="openshift-marketplace/redhat-operators-drr8f" Sep 30 09:51:46 crc kubenswrapper[4730]: I0930 09:51:46.953390 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lvwfp\" (UniqueName: \"kubernetes.io/projected/57751804-fe40-4d95-a5b2-529037d3ba17-kube-api-access-lvwfp\") pod \"redhat-operators-drr8f\" (UID: \"57751804-fe40-4d95-a5b2-529037d3ba17\") " pod="openshift-marketplace/redhat-operators-drr8f" Sep 30 09:51:46 crc kubenswrapper[4730]: I0930 09:51:46.954566 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57751804-fe40-4d95-a5b2-529037d3ba17-catalog-content\") pod \"redhat-operators-drr8f\" (UID: \"57751804-fe40-4d95-a5b2-529037d3ba17\") " pod="openshift-marketplace/redhat-operators-drr8f" Sep 30 09:51:46 crc kubenswrapper[4730]: I0930 09:51:46.956086 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57751804-fe40-4d95-a5b2-529037d3ba17-utilities\") pod \"redhat-operators-drr8f\" (UID: \"57751804-fe40-4d95-a5b2-529037d3ba17\") " pod="openshift-marketplace/redhat-operators-drr8f" Sep 30 09:51:46 crc kubenswrapper[4730]: I0930 09:51:46.974832 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-jj6x6" podStartSLOduration=11.974816377 podStartE2EDuration="11.974816377s" podCreationTimestamp="2025-09-30 09:51:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" 
lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 09:51:46.974668023 +0000 UTC m=+151.307928026" watchObservedRunningTime="2025-09-30 09:51:46.974816377 +0000 UTC m=+151.308076370" Sep 30 09:51:47 crc kubenswrapper[4730]: I0930 09:51:46.996988 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lvwfp\" (UniqueName: \"kubernetes.io/projected/57751804-fe40-4d95-a5b2-529037d3ba17-kube-api-access-lvwfp\") pod \"redhat-operators-drr8f\" (UID: \"57751804-fe40-4d95-a5b2-529037d3ba17\") " pod="openshift-marketplace/redhat-operators-drr8f" Sep 30 09:51:47 crc kubenswrapper[4730]: I0930 09:51:47.047743 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-xdzzr"] Sep 30 09:51:47 crc kubenswrapper[4730]: I0930 09:51:47.085297 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-drr8f" Sep 30 09:51:47 crc kubenswrapper[4730]: I0930 09:51:47.470843 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-drr8f"] Sep 30 09:51:47 crc kubenswrapper[4730]: I0930 09:51:47.539088 4730 patch_prober.go:28] interesting pod/router-default-5444994796-m2sph container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 30 09:51:47 crc kubenswrapper[4730]: [-]has-synced failed: reason withheld Sep 30 09:51:47 crc kubenswrapper[4730]: [+]process-running ok Sep 30 09:51:47 crc kubenswrapper[4730]: healthz check failed Sep 30 09:51:47 crc kubenswrapper[4730]: I0930 09:51:47.549939 4730 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-m2sph" podUID="16c0279b-a0e3-4400-be1a-c485c8ea0a34" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 30 09:51:47 crc kubenswrapper[4730]: W0930 09:51:47.595867 4730 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod57751804_fe40_4d95_a5b2_529037d3ba17.slice/crio-cff2d873c4c0adabe867feb666f6c6e7899eaa4baa05dff2864d870effe64416 WatchSource:0}: Error finding container cff2d873c4c0adabe867feb666f6c6e7899eaa4baa05dff2864d870effe64416: Status 404 returned error can't find the container with id cff2d873c4c0adabe867feb666f6c6e7899eaa4baa05dff2864d870effe64416 Sep 30 09:51:47 crc kubenswrapper[4730]: I0930 09:51:47.826852 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-tdl79" Sep 30 09:51:47 crc kubenswrapper[4730]: I0930 09:51:47.827200 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-tdl79" Sep 30 09:51:47 crc kubenswrapper[4730]: I0930 09:51:47.847783 4730 patch_prober.go:28] interesting pod/apiserver-76f77b778f-tdl79 container/openshift-apiserver namespace/openshift-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[+]ping ok Sep 30 09:51:47 crc kubenswrapper[4730]: [+]log ok Sep 30 09:51:47 crc kubenswrapper[4730]: [+]etcd ok Sep 30 09:51:47 crc kubenswrapper[4730]: [+]poststarthook/start-apiserver-admission-initializer ok Sep 30 09:51:47 crc kubenswrapper[4730]: [+]poststarthook/generic-apiserver-start-informers ok Sep 30 09:51:47 crc kubenswrapper[4730]: 
[+]poststarthook/max-in-flight-filter ok Sep 30 09:51:47 crc kubenswrapper[4730]: [+]poststarthook/storage-object-count-tracker-hook ok Sep 30 09:51:47 crc kubenswrapper[4730]: [+]poststarthook/image.openshift.io-apiserver-caches ok Sep 30 09:51:47 crc kubenswrapper[4730]: [-]poststarthook/authorization.openshift.io-bootstrapclusterroles failed: reason withheld Sep 30 09:51:47 crc kubenswrapper[4730]: [+]poststarthook/authorization.openshift.io-ensurenodebootstrap-sa ok Sep 30 09:51:47 crc kubenswrapper[4730]: [+]poststarthook/project.openshift.io-projectcache ok Sep 30 09:51:47 crc kubenswrapper[4730]: [+]poststarthook/project.openshift.io-projectauthorizationcache ok Sep 30 09:51:47 crc kubenswrapper[4730]: [+]poststarthook/openshift.io-startinformers ok Sep 30 09:51:47 crc kubenswrapper[4730]: [+]poststarthook/openshift.io-restmapperupdater ok Sep 30 09:51:47 crc kubenswrapper[4730]: [+]poststarthook/quota.openshift.io-clusterquotamapping ok Sep 30 09:51:47 crc kubenswrapper[4730]: livez check failed Sep 30 09:51:47 crc kubenswrapper[4730]: I0930 09:51:47.847842 4730 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-apiserver/apiserver-76f77b778f-tdl79" podUID="503191a0-1fb3-4b1e-84f1-ac3d702f686e" containerName="openshift-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 30 09:51:47 crc kubenswrapper[4730]: I0930 09:51:47.908256 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-md87h" Sep 30 09:51:47 crc kubenswrapper[4730]: I0930 09:51:47.908354 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-md87h" Sep 30 09:51:47 crc kubenswrapper[4730]: I0930 09:51:47.914878 4730 patch_prober.go:28] interesting pod/console-f9d7485db-md87h container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.8:8443/health\": dial tcp 10.217.0.8:8443: connect: connection refused" start-of-body= Sep 30 09:51:47 crc kubenswrapper[4730]: I0930 09:51:47.914964 4730 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-md87h" podUID="3c83ea01-beb2-4b6c-b67b-93cea3b56ca7" containerName="console" probeResult="failure" output="Get \"https://10.217.0.8:8443/health\": dial tcp 10.217.0.8:8443: connect: connection refused" Sep 30 09:51:47 crc kubenswrapper[4730]: I0930 09:51:47.940573 4730 generic.go:334] "Generic (PLEG): container finished" podID="57333b74-aac2-4b1b-af18-e4dece554edb" containerID="21ff7005bcd1bccc40ef59c3aa64dacb5aaa02cbc62008dfcd39aed9feb4d949" exitCode=0 Sep 30 09:51:47 crc kubenswrapper[4730]: I0930 09:51:47.940704 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5g8hq" event={"ID":"57333b74-aac2-4b1b-af18-e4dece554edb","Type":"ContainerDied","Data":"21ff7005bcd1bccc40ef59c3aa64dacb5aaa02cbc62008dfcd39aed9feb4d949"} Sep 30 09:51:47 crc kubenswrapper[4730]: I0930 09:51:47.940734 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5g8hq" event={"ID":"57333b74-aac2-4b1b-af18-e4dece554edb","Type":"ContainerStarted","Data":"a5f37e8709017513fa7dcca88ef358f0f473cbed1ad8964a5d7055fcaa2f0a13"} Sep 30 09:51:47 crc kubenswrapper[4730]: I0930 09:51:47.951589 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-drr8f" 
event={"ID":"57751804-fe40-4d95-a5b2-529037d3ba17","Type":"ContainerStarted","Data":"ace0bf4216c56b67458dd27514f850e9c4ca94d3fa487db289f01f806982fd04"} Sep 30 09:51:47 crc kubenswrapper[4730]: I0930 09:51:47.951658 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-drr8f" event={"ID":"57751804-fe40-4d95-a5b2-529037d3ba17","Type":"ContainerStarted","Data":"cff2d873c4c0adabe867feb666f6c6e7899eaa4baa05dff2864d870effe64416"} Sep 30 09:51:47 crc kubenswrapper[4730]: I0930 09:51:47.967484 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-xdzzr" event={"ID":"3f90d05f-7820-4af0-8894-6d63dc672f33","Type":"ContainerStarted","Data":"925a3e6dc870bbe1c06be6e0fb4bcc9955a89ce4133720edb6f2cf30b8e3874f"} Sep 30 09:51:47 crc kubenswrapper[4730]: I0930 09:51:47.967524 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-xdzzr" Sep 30 09:51:47 crc kubenswrapper[4730]: I0930 09:51:47.967534 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-xdzzr" event={"ID":"3f90d05f-7820-4af0-8894-6d63dc672f33","Type":"ContainerStarted","Data":"b40a39b2fd2392d4d71a6c43cd8556afbdc15e514538736631c8aef71d0ca341"} Sep 30 09:51:47 crc kubenswrapper[4730]: I0930 09:51:47.984709 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-xdzzr" podStartSLOduration=130.984692235 podStartE2EDuration="2m10.984692235s" podCreationTimestamp="2025-09-30 09:49:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 09:51:47.983534033 +0000 UTC m=+152.316794026" watchObservedRunningTime="2025-09-30 09:51:47.984692235 +0000 UTC m=+152.317952228" Sep 30 09:51:48 crc kubenswrapper[4730]: I0930 09:51:48.081851 4730 patch_prober.go:28] interesting pod/downloads-7954f5f757-gf4lh container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" start-of-body= Sep 30 09:51:48 crc kubenswrapper[4730]: I0930 09:51:48.081905 4730 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-gf4lh" podUID="af4abbd6-d1b9-411d-9128-cc5b74a93eb5" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" Sep 30 09:51:48 crc kubenswrapper[4730]: I0930 09:51:48.083926 4730 patch_prober.go:28] interesting pod/downloads-7954f5f757-gf4lh container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" start-of-body= Sep 30 09:51:48 crc kubenswrapper[4730]: I0930 09:51:48.083987 4730 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-gf4lh" podUID="af4abbd6-d1b9-411d-9128-cc5b74a93eb5" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" Sep 30 09:51:48 crc kubenswrapper[4730]: I0930 09:51:48.264558 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Sep 30 09:51:48 crc kubenswrapper[4730]: I0930 
09:51:48.265279 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Sep 30 09:51:48 crc kubenswrapper[4730]: I0930 09:51:48.275025 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt" Sep 30 09:51:48 crc kubenswrapper[4730]: I0930 09:51:48.293569 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Sep 30 09:51:48 crc kubenswrapper[4730]: I0930 09:51:48.296525 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n" Sep 30 09:51:48 crc kubenswrapper[4730]: I0930 09:51:48.392714 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0f9427a2-2872-43a6-9dfe-5754901b1043-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"0f9427a2-2872-43a6-9dfe-5754901b1043\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Sep 30 09:51:48 crc kubenswrapper[4730]: I0930 09:51:48.392760 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/0f9427a2-2872-43a6-9dfe-5754901b1043-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"0f9427a2-2872-43a6-9dfe-5754901b1043\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Sep 30 09:51:48 crc kubenswrapper[4730]: I0930 09:51:48.397425 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes" Sep 30 09:51:48 crc kubenswrapper[4730]: I0930 09:51:48.494099 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0f9427a2-2872-43a6-9dfe-5754901b1043-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"0f9427a2-2872-43a6-9dfe-5754901b1043\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Sep 30 09:51:48 crc kubenswrapper[4730]: I0930 09:51:48.494170 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/0f9427a2-2872-43a6-9dfe-5754901b1043-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"0f9427a2-2872-43a6-9dfe-5754901b1043\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Sep 30 09:51:48 crc kubenswrapper[4730]: I0930 09:51:48.494338 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/0f9427a2-2872-43a6-9dfe-5754901b1043-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"0f9427a2-2872-43a6-9dfe-5754901b1043\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Sep 30 09:51:48 crc kubenswrapper[4730]: I0930 09:51:48.531811 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-m2sph" Sep 30 09:51:48 crc kubenswrapper[4730]: I0930 09:51:48.554737 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0f9427a2-2872-43a6-9dfe-5754901b1043-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"0f9427a2-2872-43a6-9dfe-5754901b1043\") " 
pod="openshift-kube-controller-manager/revision-pruner-9-crc" Sep 30 09:51:48 crc kubenswrapper[4730]: I0930 09:51:48.557666 4730 patch_prober.go:28] interesting pod/router-default-5444994796-m2sph container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 30 09:51:48 crc kubenswrapper[4730]: [-]has-synced failed: reason withheld Sep 30 09:51:48 crc kubenswrapper[4730]: [+]process-running ok Sep 30 09:51:48 crc kubenswrapper[4730]: healthz check failed Sep 30 09:51:48 crc kubenswrapper[4730]: I0930 09:51:48.557880 4730 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-m2sph" podUID="16c0279b-a0e3-4400-be1a-c485c8ea0a34" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 30 09:51:48 crc kubenswrapper[4730]: I0930 09:51:48.602367 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Sep 30 09:51:48 crc kubenswrapper[4730]: I0930 09:51:48.985033 4730 generic.go:334] "Generic (PLEG): container finished" podID="47a443f7-7d61-447b-b119-29dcd51b1b18" containerID="e94bbc773fe8bfcdb493a02ccda8b8ce059c6ce0e8368296e2b13dd81a7d5f05" exitCode=0 Sep 30 09:51:48 crc kubenswrapper[4730]: I0930 09:51:48.985195 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29320425-gwwdz" event={"ID":"47a443f7-7d61-447b-b119-29dcd51b1b18","Type":"ContainerDied","Data":"e94bbc773fe8bfcdb493a02ccda8b8ce059c6ce0e8368296e2b13dd81a7d5f05"} Sep 30 09:51:48 crc kubenswrapper[4730]: I0930 09:51:48.991748 4730 generic.go:334] "Generic (PLEG): container finished" podID="57751804-fe40-4d95-a5b2-529037d3ba17" containerID="ace0bf4216c56b67458dd27514f850e9c4ca94d3fa487db289f01f806982fd04" exitCode=0 Sep 30 09:51:48 crc kubenswrapper[4730]: I0930 09:51:48.992302 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-drr8f" event={"ID":"57751804-fe40-4d95-a5b2-529037d3ba17","Type":"ContainerDied","Data":"ace0bf4216c56b67458dd27514f850e9c4ca94d3fa487db289f01f806982fd04"} Sep 30 09:51:49 crc kubenswrapper[4730]: I0930 09:51:49.162452 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Sep 30 09:51:49 crc kubenswrapper[4730]: I0930 09:51:49.533858 4730 patch_prober.go:28] interesting pod/router-default-5444994796-m2sph container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 30 09:51:49 crc kubenswrapper[4730]: [-]has-synced failed: reason withheld Sep 30 09:51:49 crc kubenswrapper[4730]: [+]process-running ok Sep 30 09:51:49 crc kubenswrapper[4730]: healthz check failed Sep 30 09:51:49 crc kubenswrapper[4730]: I0930 09:51:49.533923 4730 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-m2sph" podUID="16c0279b-a0e3-4400-be1a-c485c8ea0a34" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 30 09:51:50 crc kubenswrapper[4730]: I0930 09:51:50.003924 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" 
event={"ID":"0f9427a2-2872-43a6-9dfe-5754901b1043","Type":"ContainerStarted","Data":"bf93d2f126f81c806e7da5e5cf8a054badd97098dd42ac1928d0a3d301c0d697"} Sep 30 09:51:50 crc kubenswrapper[4730]: I0930 09:51:50.005335 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"0f9427a2-2872-43a6-9dfe-5754901b1043","Type":"ContainerStarted","Data":"99c3a4e02c3b61699fcbccaff29ba666e224f452ea788807f537f2ede7977799"} Sep 30 09:51:50 crc kubenswrapper[4730]: I0930 09:51:50.023206 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/revision-pruner-9-crc" podStartSLOduration=2.023185133 podStartE2EDuration="2.023185133s" podCreationTimestamp="2025-09-30 09:51:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 09:51:50.021072574 +0000 UTC m=+154.354332567" watchObservedRunningTime="2025-09-30 09:51:50.023185133 +0000 UTC m=+154.356445126" Sep 30 09:51:50 crc kubenswrapper[4730]: I0930 09:51:50.534778 4730 patch_prober.go:28] interesting pod/router-default-5444994796-m2sph container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 30 09:51:50 crc kubenswrapper[4730]: [-]has-synced failed: reason withheld Sep 30 09:51:50 crc kubenswrapper[4730]: [+]process-running ok Sep 30 09:51:50 crc kubenswrapper[4730]: healthz check failed Sep 30 09:51:50 crc kubenswrapper[4730]: I0930 09:51:50.534880 4730 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-m2sph" podUID="16c0279b-a0e3-4400-be1a-c485c8ea0a34" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 30 09:51:50 crc kubenswrapper[4730]: I0930 09:51:50.587533 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29320425-gwwdz" Sep 30 09:51:50 crc kubenswrapper[4730]: I0930 09:51:50.647019 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/47a443f7-7d61-447b-b119-29dcd51b1b18-secret-volume\") pod \"47a443f7-7d61-447b-b119-29dcd51b1b18\" (UID: \"47a443f7-7d61-447b-b119-29dcd51b1b18\") " Sep 30 09:51:50 crc kubenswrapper[4730]: I0930 09:51:50.647112 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r56fk\" (UniqueName: \"kubernetes.io/projected/47a443f7-7d61-447b-b119-29dcd51b1b18-kube-api-access-r56fk\") pod \"47a443f7-7d61-447b-b119-29dcd51b1b18\" (UID: \"47a443f7-7d61-447b-b119-29dcd51b1b18\") " Sep 30 09:51:50 crc kubenswrapper[4730]: I0930 09:51:50.647153 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/47a443f7-7d61-447b-b119-29dcd51b1b18-config-volume\") pod \"47a443f7-7d61-447b-b119-29dcd51b1b18\" (UID: \"47a443f7-7d61-447b-b119-29dcd51b1b18\") " Sep 30 09:51:50 crc kubenswrapper[4730]: I0930 09:51:50.648270 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/47a443f7-7d61-447b-b119-29dcd51b1b18-config-volume" (OuterVolumeSpecName: "config-volume") pod "47a443f7-7d61-447b-b119-29dcd51b1b18" (UID: "47a443f7-7d61-447b-b119-29dcd51b1b18"). 
InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 09:51:50 crc kubenswrapper[4730]: I0930 09:51:50.652519 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/47a443f7-7d61-447b-b119-29dcd51b1b18-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "47a443f7-7d61-447b-b119-29dcd51b1b18" (UID: "47a443f7-7d61-447b-b119-29dcd51b1b18"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 09:51:50 crc kubenswrapper[4730]: I0930 09:51:50.658175 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/47a443f7-7d61-447b-b119-29dcd51b1b18-kube-api-access-r56fk" (OuterVolumeSpecName: "kube-api-access-r56fk") pod "47a443f7-7d61-447b-b119-29dcd51b1b18" (UID: "47a443f7-7d61-447b-b119-29dcd51b1b18"). InnerVolumeSpecName "kube-api-access-r56fk". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 09:51:50 crc kubenswrapper[4730]: I0930 09:51:50.748701 4730 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/47a443f7-7d61-447b-b119-29dcd51b1b18-secret-volume\") on node \"crc\" DevicePath \"\"" Sep 30 09:51:50 crc kubenswrapper[4730]: I0930 09:51:50.748743 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r56fk\" (UniqueName: \"kubernetes.io/projected/47a443f7-7d61-447b-b119-29dcd51b1b18-kube-api-access-r56fk\") on node \"crc\" DevicePath \"\"" Sep 30 09:51:50 crc kubenswrapper[4730]: I0930 09:51:50.748755 4730 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/47a443f7-7d61-447b-b119-29dcd51b1b18-config-volume\") on node \"crc\" DevicePath \"\"" Sep 30 09:51:51 crc kubenswrapper[4730]: I0930 09:51:51.016159 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29320425-gwwdz" event={"ID":"47a443f7-7d61-447b-b119-29dcd51b1b18","Type":"ContainerDied","Data":"2bc82827de0365291daa0923dad93d253caa2b928abce592e08a25d7800878c7"} Sep 30 09:51:51 crc kubenswrapper[4730]: I0930 09:51:51.016224 4730 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2bc82827de0365291daa0923dad93d253caa2b928abce592e08a25d7800878c7" Sep 30 09:51:51 crc kubenswrapper[4730]: I0930 09:51:51.016288 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29320425-gwwdz" Sep 30 09:51:51 crc kubenswrapper[4730]: I0930 09:51:51.028045 4730 generic.go:334] "Generic (PLEG): container finished" podID="0f9427a2-2872-43a6-9dfe-5754901b1043" containerID="bf93d2f126f81c806e7da5e5cf8a054badd97098dd42ac1928d0a3d301c0d697" exitCode=0 Sep 30 09:51:51 crc kubenswrapper[4730]: I0930 09:51:51.028121 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"0f9427a2-2872-43a6-9dfe-5754901b1043","Type":"ContainerDied","Data":"bf93d2f126f81c806e7da5e5cf8a054badd97098dd42ac1928d0a3d301c0d697"} Sep 30 09:51:51 crc kubenswrapper[4730]: I0930 09:51:51.384345 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Sep 30 09:51:51 crc kubenswrapper[4730]: E0930 09:51:51.385220 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="47a443f7-7d61-447b-b119-29dcd51b1b18" containerName="collect-profiles" Sep 30 09:51:51 crc kubenswrapper[4730]: I0930 09:51:51.385239 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="47a443f7-7d61-447b-b119-29dcd51b1b18" containerName="collect-profiles" Sep 30 09:51:51 crc kubenswrapper[4730]: I0930 09:51:51.385436 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="47a443f7-7d61-447b-b119-29dcd51b1b18" containerName="collect-profiles" Sep 30 09:51:51 crc kubenswrapper[4730]: I0930 09:51:51.386091 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 30 09:51:51 crc kubenswrapper[4730]: I0930 09:51:51.390039 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Sep 30 09:51:51 crc kubenswrapper[4730]: I0930 09:51:51.390443 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Sep 30 09:51:51 crc kubenswrapper[4730]: I0930 09:51:51.395758 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Sep 30 09:51:51 crc kubenswrapper[4730]: I0930 09:51:51.460025 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f65dbc04-da69-4a9e-b564-bd39da6f5afe-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"f65dbc04-da69-4a9e-b564-bd39da6f5afe\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 30 09:51:51 crc kubenswrapper[4730]: I0930 09:51:51.460219 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f65dbc04-da69-4a9e-b564-bd39da6f5afe-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"f65dbc04-da69-4a9e-b564-bd39da6f5afe\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 30 09:51:51 crc kubenswrapper[4730]: I0930 09:51:51.534212 4730 patch_prober.go:28] interesting pod/router-default-5444994796-m2sph container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 30 09:51:51 crc kubenswrapper[4730]: [-]has-synced failed: reason withheld Sep 30 09:51:51 crc kubenswrapper[4730]: [+]process-running ok Sep 30 09:51:51 crc kubenswrapper[4730]: healthz check failed Sep 30 09:51:51 crc 
kubenswrapper[4730]: I0930 09:51:51.534278 4730 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-m2sph" podUID="16c0279b-a0e3-4400-be1a-c485c8ea0a34" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 30 09:51:51 crc kubenswrapper[4730]: I0930 09:51:51.562018 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f65dbc04-da69-4a9e-b564-bd39da6f5afe-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"f65dbc04-da69-4a9e-b564-bd39da6f5afe\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 30 09:51:51 crc kubenswrapper[4730]: I0930 09:51:51.562121 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f65dbc04-da69-4a9e-b564-bd39da6f5afe-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"f65dbc04-da69-4a9e-b564-bd39da6f5afe\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 30 09:51:51 crc kubenswrapper[4730]: I0930 09:51:51.562277 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f65dbc04-da69-4a9e-b564-bd39da6f5afe-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"f65dbc04-da69-4a9e-b564-bd39da6f5afe\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 30 09:51:51 crc kubenswrapper[4730]: I0930 09:51:51.603415 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f65dbc04-da69-4a9e-b564-bd39da6f5afe-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"f65dbc04-da69-4a9e-b564-bd39da6f5afe\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 30 09:51:51 crc kubenswrapper[4730]: I0930 09:51:51.710284 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 30 09:51:52 crc kubenswrapper[4730]: I0930 09:51:52.067293 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Sep 30 09:51:52 crc kubenswrapper[4730]: W0930 09:51:52.160147 4730 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-podf65dbc04_da69_4a9e_b564_bd39da6f5afe.slice/crio-fae6b5ec13a8ee13cd979983369a76a6f0119fbff8aa1da6dc958d42fc083f28 WatchSource:0}: Error finding container fae6b5ec13a8ee13cd979983369a76a6f0119fbff8aa1da6dc958d42fc083f28: Status 404 returned error can't find the container with id fae6b5ec13a8ee13cd979983369a76a6f0119fbff8aa1da6dc958d42fc083f28 Sep 30 09:51:52 crc kubenswrapper[4730]: I0930 09:51:52.420516 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Sep 30 09:51:52 crc kubenswrapper[4730]: I0930 09:51:52.475127 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/0f9427a2-2872-43a6-9dfe-5754901b1043-kubelet-dir\") pod \"0f9427a2-2872-43a6-9dfe-5754901b1043\" (UID: \"0f9427a2-2872-43a6-9dfe-5754901b1043\") " Sep 30 09:51:52 crc kubenswrapper[4730]: I0930 09:51:52.475254 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0f9427a2-2872-43a6-9dfe-5754901b1043-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "0f9427a2-2872-43a6-9dfe-5754901b1043" (UID: "0f9427a2-2872-43a6-9dfe-5754901b1043"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 09:51:52 crc kubenswrapper[4730]: I0930 09:51:52.475420 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0f9427a2-2872-43a6-9dfe-5754901b1043-kube-api-access\") pod \"0f9427a2-2872-43a6-9dfe-5754901b1043\" (UID: \"0f9427a2-2872-43a6-9dfe-5754901b1043\") " Sep 30 09:51:52 crc kubenswrapper[4730]: I0930 09:51:52.475838 4730 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/0f9427a2-2872-43a6-9dfe-5754901b1043-kubelet-dir\") on node \"crc\" DevicePath \"\"" Sep 30 09:51:52 crc kubenswrapper[4730]: I0930 09:51:52.487983 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0f9427a2-2872-43a6-9dfe-5754901b1043-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0f9427a2-2872-43a6-9dfe-5754901b1043" (UID: "0f9427a2-2872-43a6-9dfe-5754901b1043"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 09:51:52 crc kubenswrapper[4730]: I0930 09:51:52.535533 4730 patch_prober.go:28] interesting pod/router-default-5444994796-m2sph container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 30 09:51:52 crc kubenswrapper[4730]: [-]has-synced failed: reason withheld Sep 30 09:51:52 crc kubenswrapper[4730]: [+]process-running ok Sep 30 09:51:52 crc kubenswrapper[4730]: healthz check failed Sep 30 09:51:52 crc kubenswrapper[4730]: I0930 09:51:52.535604 4730 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-m2sph" podUID="16c0279b-a0e3-4400-be1a-c485c8ea0a34" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 30 09:51:52 crc kubenswrapper[4730]: I0930 09:51:52.577717 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0f9427a2-2872-43a6-9dfe-5754901b1043-kube-api-access\") on node \"crc\" DevicePath \"\"" Sep 30 09:51:52 crc kubenswrapper[4730]: I0930 09:51:52.829545 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-tdl79" Sep 30 09:51:52 crc kubenswrapper[4730]: I0930 09:51:52.836125 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-tdl79" Sep 30 09:51:53 crc kubenswrapper[4730]: I0930 09:51:53.069013 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"f65dbc04-da69-4a9e-b564-bd39da6f5afe","Type":"ContainerStarted","Data":"fae6b5ec13a8ee13cd979983369a76a6f0119fbff8aa1da6dc958d42fc083f28"} Sep 30 09:51:53 crc kubenswrapper[4730]: I0930 09:51:53.117482 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Sep 30 09:51:53 crc kubenswrapper[4730]: I0930 09:51:53.117757 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"0f9427a2-2872-43a6-9dfe-5754901b1043","Type":"ContainerDied","Data":"99c3a4e02c3b61699fcbccaff29ba666e224f452ea788807f537f2ede7977799"} Sep 30 09:51:53 crc kubenswrapper[4730]: I0930 09:51:53.117871 4730 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="99c3a4e02c3b61699fcbccaff29ba666e224f452ea788807f537f2ede7977799" Sep 30 09:51:53 crc kubenswrapper[4730]: I0930 09:51:53.534550 4730 patch_prober.go:28] interesting pod/router-default-5444994796-m2sph container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 30 09:51:53 crc kubenswrapper[4730]: [-]has-synced failed: reason withheld Sep 30 09:51:53 crc kubenswrapper[4730]: [+]process-running ok Sep 30 09:51:53 crc kubenswrapper[4730]: healthz check failed Sep 30 09:51:53 crc kubenswrapper[4730]: I0930 09:51:53.534675 4730 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-m2sph" podUID="16c0279b-a0e3-4400-be1a-c485c8ea0a34" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 30 09:51:53 crc kubenswrapper[4730]: I0930 09:51:53.709678 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-r8pgh" Sep 30 09:51:54 crc kubenswrapper[4730]: I0930 09:51:54.203808 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"f65dbc04-da69-4a9e-b564-bd39da6f5afe","Type":"ContainerStarted","Data":"272a0a94cee4f4157233fd07537c671e9dce82ed07b344a9dd5c5920076b0a03"} Sep 30 09:51:54 crc kubenswrapper[4730]: I0930 09:51:54.533724 4730 patch_prober.go:28] interesting pod/router-default-5444994796-m2sph container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 30 09:51:54 crc kubenswrapper[4730]: [-]has-synced failed: reason withheld Sep 30 09:51:54 crc kubenswrapper[4730]: [+]process-running ok Sep 30 09:51:54 crc kubenswrapper[4730]: healthz check failed Sep 30 09:51:54 crc kubenswrapper[4730]: I0930 09:51:54.533798 4730 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-m2sph" podUID="16c0279b-a0e3-4400-be1a-c485c8ea0a34" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 30 09:51:55 crc kubenswrapper[4730]: I0930 09:51:55.222222 4730 generic.go:334] "Generic (PLEG): container finished" podID="f65dbc04-da69-4a9e-b564-bd39da6f5afe" containerID="272a0a94cee4f4157233fd07537c671e9dce82ed07b344a9dd5c5920076b0a03" exitCode=0 Sep 30 09:51:55 crc kubenswrapper[4730]: I0930 09:51:55.222312 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"f65dbc04-da69-4a9e-b564-bd39da6f5afe","Type":"ContainerDied","Data":"272a0a94cee4f4157233fd07537c671e9dce82ed07b344a9dd5c5920076b0a03"} Sep 30 09:51:55 crc kubenswrapper[4730]: I0930 09:51:55.533971 4730 patch_prober.go:28] interesting pod/router-default-5444994796-m2sph container/router namespace/openshift-ingress: 
Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 30 09:51:55 crc kubenswrapper[4730]: [-]has-synced failed: reason withheld Sep 30 09:51:55 crc kubenswrapper[4730]: [+]process-running ok Sep 30 09:51:55 crc kubenswrapper[4730]: healthz check failed Sep 30 09:51:55 crc kubenswrapper[4730]: I0930 09:51:55.534037 4730 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-m2sph" podUID="16c0279b-a0e3-4400-be1a-c485c8ea0a34" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 30 09:51:56 crc kubenswrapper[4730]: I0930 09:51:56.534882 4730 patch_prober.go:28] interesting pod/router-default-5444994796-m2sph container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 30 09:51:56 crc kubenswrapper[4730]: [-]has-synced failed: reason withheld Sep 30 09:51:56 crc kubenswrapper[4730]: [+]process-running ok Sep 30 09:51:56 crc kubenswrapper[4730]: healthz check failed Sep 30 09:51:56 crc kubenswrapper[4730]: I0930 09:51:56.535478 4730 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-m2sph" podUID="16c0279b-a0e3-4400-be1a-c485c8ea0a34" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 30 09:51:57 crc kubenswrapper[4730]: I0930 09:51:57.532368 4730 patch_prober.go:28] interesting pod/router-default-5444994796-m2sph container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 30 09:51:57 crc kubenswrapper[4730]: [-]has-synced failed: reason withheld Sep 30 09:51:57 crc kubenswrapper[4730]: [+]process-running ok Sep 30 09:51:57 crc kubenswrapper[4730]: healthz check failed Sep 30 09:51:57 crc kubenswrapper[4730]: I0930 09:51:57.532440 4730 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-m2sph" podUID="16c0279b-a0e3-4400-be1a-c485c8ea0a34" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 30 09:51:57 crc kubenswrapper[4730]: I0930 09:51:57.908871 4730 patch_prober.go:28] interesting pod/console-f9d7485db-md87h container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.8:8443/health\": dial tcp 10.217.0.8:8443: connect: connection refused" start-of-body= Sep 30 09:51:57 crc kubenswrapper[4730]: I0930 09:51:57.908942 4730 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-md87h" podUID="3c83ea01-beb2-4b6c-b67b-93cea3b56ca7" containerName="console" probeResult="failure" output="Get \"https://10.217.0.8:8443/health\": dial tcp 10.217.0.8:8443: connect: connection refused" Sep 30 09:51:58 crc kubenswrapper[4730]: I0930 09:51:58.081457 4730 patch_prober.go:28] interesting pod/downloads-7954f5f757-gf4lh container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" start-of-body= Sep 30 09:51:58 crc kubenswrapper[4730]: I0930 09:51:58.081428 4730 patch_prober.go:28] interesting pod/downloads-7954f5f757-gf4lh container/download-server namespace/openshift-console: Liveness probe status=failure output="Get 
\"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" start-of-body= Sep 30 09:51:58 crc kubenswrapper[4730]: I0930 09:51:58.081693 4730 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-gf4lh" podUID="af4abbd6-d1b9-411d-9128-cc5b74a93eb5" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" Sep 30 09:51:58 crc kubenswrapper[4730]: I0930 09:51:58.081525 4730 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-gf4lh" podUID="af4abbd6-d1b9-411d-9128-cc5b74a93eb5" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" Sep 30 09:51:58 crc kubenswrapper[4730]: I0930 09:51:58.535582 4730 patch_prober.go:28] interesting pod/router-default-5444994796-m2sph container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 30 09:51:58 crc kubenswrapper[4730]: [-]has-synced failed: reason withheld Sep 30 09:51:58 crc kubenswrapper[4730]: [+]process-running ok Sep 30 09:51:58 crc kubenswrapper[4730]: healthz check failed Sep 30 09:51:58 crc kubenswrapper[4730]: I0930 09:51:58.536024 4730 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-m2sph" podUID="16c0279b-a0e3-4400-be1a-c485c8ea0a34" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 30 09:51:59 crc kubenswrapper[4730]: I0930 09:51:59.533415 4730 patch_prober.go:28] interesting pod/router-default-5444994796-m2sph container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 30 09:51:59 crc kubenswrapper[4730]: [-]has-synced failed: reason withheld Sep 30 09:51:59 crc kubenswrapper[4730]: [+]process-running ok Sep 30 09:51:59 crc kubenswrapper[4730]: healthz check failed Sep 30 09:51:59 crc kubenswrapper[4730]: I0930 09:51:59.533530 4730 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-m2sph" podUID="16c0279b-a0e3-4400-be1a-c485c8ea0a34" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 30 09:51:59 crc kubenswrapper[4730]: I0930 09:51:59.551452 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/be86a67e-c663-4551-9ecf-a8c2a9801cd7-metrics-certs\") pod \"network-metrics-daemon-dqqrb\" (UID: \"be86a67e-c663-4551-9ecf-a8c2a9801cd7\") " pod="openshift-multus/network-metrics-daemon-dqqrb" Sep 30 09:51:59 crc kubenswrapper[4730]: I0930 09:51:59.562876 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/be86a67e-c663-4551-9ecf-a8c2a9801cd7-metrics-certs\") pod \"network-metrics-daemon-dqqrb\" (UID: \"be86a67e-c663-4551-9ecf-a8c2a9801cd7\") " pod="openshift-multus/network-metrics-daemon-dqqrb" Sep 30 09:51:59 crc kubenswrapper[4730]: I0930 09:51:59.743717 4730 util.go:30] "No sandbox for pod can be found. 
Sep 30 09:52:00 crc kubenswrapper[4730]: I0930 09:52:00.534133 4730 patch_prober.go:28] interesting pod/router-default-5444994796-m2sph container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Sep 30 09:52:00 crc kubenswrapper[4730]: [-]has-synced failed: reason withheld
Sep 30 09:52:00 crc kubenswrapper[4730]: [+]process-running ok
Sep 30 09:52:00 crc kubenswrapper[4730]: healthz check failed
Sep 30 09:52:00 crc kubenswrapper[4730]: I0930 09:52:00.534198 4730 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-m2sph" podUID="16c0279b-a0e3-4400-be1a-c485c8ea0a34" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Sep 30 09:52:01 crc kubenswrapper[4730]: I0930 09:52:01.533360 4730 patch_prober.go:28] interesting pod/router-default-5444994796-m2sph container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Sep 30 09:52:01 crc kubenswrapper[4730]: [-]has-synced failed: reason withheld
Sep 30 09:52:01 crc kubenswrapper[4730]: [+]process-running ok
Sep 30 09:52:01 crc kubenswrapper[4730]: healthz check failed
Sep 30 09:52:01 crc kubenswrapper[4730]: I0930 09:52:01.533906 4730 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-m2sph" podUID="16c0279b-a0e3-4400-be1a-c485c8ea0a34" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Sep 30 09:52:02 crc kubenswrapper[4730]: I0930 09:52:02.161999 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc"
Sep 30 09:52:02 crc kubenswrapper[4730]: I0930 09:52:02.287093 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"f65dbc04-da69-4a9e-b564-bd39da6f5afe","Type":"ContainerDied","Data":"fae6b5ec13a8ee13cd979983369a76a6f0119fbff8aa1da6dc958d42fc083f28"}
Sep 30 09:52:02 crc kubenswrapper[4730]: I0930 09:52:02.287147 4730 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fae6b5ec13a8ee13cd979983369a76a6f0119fbff8aa1da6dc958d42fc083f28"
Sep 30 09:52:02 crc kubenswrapper[4730]: I0930 09:52:02.287174 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc"
Sep 30 09:52:02 crc kubenswrapper[4730]: I0930 09:52:02.290797 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f65dbc04-da69-4a9e-b564-bd39da6f5afe-kube-api-access\") pod \"f65dbc04-da69-4a9e-b564-bd39da6f5afe\" (UID: \"f65dbc04-da69-4a9e-b564-bd39da6f5afe\") "
Sep 30 09:52:02 crc kubenswrapper[4730]: I0930 09:52:02.290965 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f65dbc04-da69-4a9e-b564-bd39da6f5afe-kubelet-dir\") pod \"f65dbc04-da69-4a9e-b564-bd39da6f5afe\" (UID: \"f65dbc04-da69-4a9e-b564-bd39da6f5afe\") "
Sep 30 09:52:02 crc kubenswrapper[4730]: I0930 09:52:02.291224 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f65dbc04-da69-4a9e-b564-bd39da6f5afe-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "f65dbc04-da69-4a9e-b564-bd39da6f5afe" (UID: "f65dbc04-da69-4a9e-b564-bd39da6f5afe"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Sep 30 09:52:02 crc kubenswrapper[4730]: I0930 09:52:02.336890 4730 patch_prober.go:28] interesting pod/machine-config-daemon-d4zf9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 30 09:52:02 crc kubenswrapper[4730]: I0930 09:52:02.336975 4730 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 30 09:52:02 crc kubenswrapper[4730]: I0930 09:52:02.336904 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f65dbc04-da69-4a9e-b564-bd39da6f5afe-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "f65dbc04-da69-4a9e-b564-bd39da6f5afe" (UID: "f65dbc04-da69-4a9e-b564-bd39da6f5afe"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 09:52:02 crc kubenswrapper[4730]: I0930 09:52:02.392406 4730 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f65dbc04-da69-4a9e-b564-bd39da6f5afe-kubelet-dir\") on node \"crc\" DevicePath \"\""
Sep 30 09:52:02 crc kubenswrapper[4730]: I0930 09:52:02.392500 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f65dbc04-da69-4a9e-b564-bd39da6f5afe-kube-api-access\") on node \"crc\" DevicePath \"\""
Sep 30 09:52:02 crc kubenswrapper[4730]: I0930 09:52:02.533241 4730 patch_prober.go:28] interesting pod/router-default-5444994796-m2sph container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Sep 30 09:52:02 crc kubenswrapper[4730]: [-]has-synced failed: reason withheld
Sep 30 09:52:02 crc kubenswrapper[4730]: [+]process-running ok
Sep 30 09:52:02 crc kubenswrapper[4730]: healthz check failed
Sep 30 09:52:02 crc kubenswrapper[4730]: I0930 09:52:02.533348 4730 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-m2sph" podUID="16c0279b-a0e3-4400-be1a-c485c8ea0a34" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Sep 30 09:52:03 crc kubenswrapper[4730]: I0930 09:52:03.534625 4730 patch_prober.go:28] interesting pod/router-default-5444994796-m2sph container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Sep 30 09:52:03 crc kubenswrapper[4730]: [-]has-synced failed: reason withheld
Sep 30 09:52:03 crc kubenswrapper[4730]: [+]process-running ok
Sep 30 09:52:03 crc kubenswrapper[4730]: healthz check failed
Sep 30 09:52:03 crc kubenswrapper[4730]: I0930 09:52:03.534705 4730 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-m2sph" podUID="16c0279b-a0e3-4400-be1a-c485c8ea0a34" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Sep 30 09:52:04 crc kubenswrapper[4730]: I0930 09:52:04.533108 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-m2sph"
Sep 30 09:52:04 crc kubenswrapper[4730]: I0930 09:52:04.536137 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-m2sph"
Sep 30 09:52:06 crc kubenswrapper[4730]: I0930 09:52:06.708246 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-xdzzr"
Sep 30 09:52:07 crc kubenswrapper[4730]: I0930 09:52:07.920891 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-md87h"
Sep 30 09:52:07 crc kubenswrapper[4730]: I0930 09:52:07.925284 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-md87h"
Sep 30 09:52:08 crc kubenswrapper[4730]: I0930 09:52:08.103916 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-gf4lh"
Sep 30 09:52:14 crc kubenswrapper[4730]: E0930 09:52:14.549222 4730 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18"
Sep 30 09:52:14 crc kubenswrapper[4730]: E0930 09:52:14.552001 4730 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-lvwfp,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-drr8f_openshift-marketplace(57751804-fe40-4d95-a5b2-529037d3ba17): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Sep 30 09:52:14 crc kubenswrapper[4730]: E0930 09:52:14.553352 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-drr8f" podUID="57751804-fe40-4d95-a5b2-529037d3ba17"
Sep 30 09:52:18 crc kubenswrapper[4730]: I0930 09:52:18.226195 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-wqstr"
Sep 30 09:52:20 crc kubenswrapper[4730]: E0930 09:52:20.825734 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-drr8f" podUID="57751804-fe40-4d95-a5b2-529037d3ba17"
Sep 30 09:52:20 crc kubenswrapper[4730]: E0930 09:52:20.886639 4730 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18"
Sep 30 09:52:20 crc kubenswrapper[4730]: E0930 09:52:20.886883 4730 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-lv4g8,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-zsmr9_openshift-marketplace(b6d72e26-75bd-4095-aae3-345a816de053): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Sep 30 09:52:20 crc kubenswrapper[4730]: E0930 09:52:20.888044 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-zsmr9" podUID="b6d72e26-75bd-4095-aae3-345a816de053"
Sep 30 09:52:22 crc kubenswrapper[4730]: E0930 09:52:22.262596 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-zsmr9" podUID="b6d72e26-75bd-4095-aae3-345a816de053"
Sep 30 09:52:22 crc kubenswrapper[4730]: E0930 09:52:22.371960 4730 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18"
Sep 30 09:52:22 crc kubenswrapper[4730]: E0930 09:52:22.372133 4730 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-q4xfm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-fnbq4_openshift-marketplace(9ea0a817-d369-46fc-9e35-ab227abcbf25): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Sep 30 09:52:22 crc kubenswrapper[4730]: E0930 09:52:22.373369 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-fnbq4" podUID="9ea0a817-d369-46fc-9e35-ab227abcbf25"
Sep 30 09:52:22 crc kubenswrapper[4730]: E0930 09:52:22.380412 4730 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18"
Sep 30 09:52:22 crc kubenswrapper[4730]: E0930 09:52:22.380550 4730 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-vsmsb,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-8srlp_openshift-marketplace(297e4d9f-18c2-4e11-a89b-11df93cba4ef): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Sep 30 09:52:22 crc kubenswrapper[4730]: E0930 09:52:22.381845 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-8srlp" podUID="297e4d9f-18c2-4e11-a89b-11df93cba4ef"
Sep 30 09:52:22 crc kubenswrapper[4730]: E0930 09:52:22.402197 4730 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18"
Sep 30 09:52:22 crc kubenswrapper[4730]: E0930 09:52:22.402523 4730 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-2bllk,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-5g8hq_openshift-marketplace(57333b74-aac2-4b1b-af18-e4dece554edb): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Sep 30 09:52:22 crc kubenswrapper[4730]: E0930 09:52:22.404341 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-5g8hq" podUID="57333b74-aac2-4b1b-af18-e4dece554edb"
Sep 30 09:52:22 crc kubenswrapper[4730]: E0930 09:52:22.418998 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-8srlp" podUID="297e4d9f-18c2-4e11-a89b-11df93cba4ef"
Sep 30 09:52:22 crc kubenswrapper[4730]: E0930 09:52:22.420160 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-5g8hq" podUID="57333b74-aac2-4b1b-af18-e4dece554edb"
Sep 30 09:52:22 crc kubenswrapper[4730]: E0930 09:52:22.420274 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-fnbq4" podUID="9ea0a817-d369-46fc-9e35-ab227abcbf25"
Sep 30 09:52:22 crc kubenswrapper[4730]: E0930 09:52:22.423545 4730 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18"
Sep 30 09:52:22 crc kubenswrapper[4730]: E0930 09:52:22.423730 4730 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-xjprn,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-v67gm_openshift-marketplace(30fe799a-bbf1-4e62-8187-353da7ca9930): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Sep 30 09:52:22 crc kubenswrapper[4730]: E0930 09:52:22.424827 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-v67gm" podUID="30fe799a-bbf1-4e62-8187-353da7ca9930"
Sep 30 09:52:22 crc kubenswrapper[4730]: I0930 09:52:22.729969 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-dqqrb"]
Sep 30 09:52:22 crc kubenswrapper[4730]: W0930 09:52:22.734510 4730 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbe86a67e_c663_4551_9ecf_a8c2a9801cd7.slice/crio-970bf5e328f4517058f87939311578ce42fd389a7f805211f5cdaabb1990be97 WatchSource:0}: Error finding container 970bf5e328f4517058f87939311578ce42fd389a7f805211f5cdaabb1990be97: Status 404 returned error can't find the container with id 970bf5e328f4517058f87939311578ce42fd389a7f805211f5cdaabb1990be97
Sep 30 09:52:23 crc kubenswrapper[4730]: I0930 09:52:23.425591 4730 generic.go:334] "Generic (PLEG): container finished" podID="6fe7d185-5cd3-4de1-a842-065620d27fdf" containerID="cbc86d450df989e9ee9ffb6000f3fd1106310a79888f1178008d101ab6c7acc7" exitCode=0
Sep 30 09:52:23 crc kubenswrapper[4730]: I0930 09:52:23.425897 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fbd6s" event={"ID":"6fe7d185-5cd3-4de1-a842-065620d27fdf","Type":"ContainerDied","Data":"cbc86d450df989e9ee9ffb6000f3fd1106310a79888f1178008d101ab6c7acc7"}
Sep 30 09:52:23 crc kubenswrapper[4730]: I0930 09:52:23.433767 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-dqqrb" event={"ID":"be86a67e-c663-4551-9ecf-a8c2a9801cd7","Type":"ContainerStarted","Data":"e77f08c5ed6013843ce2202445bf199e17ac698bfbcc84ab82544d2847ed2a07"}
Sep 30 09:52:23 crc kubenswrapper[4730]: I0930 09:52:23.433834 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-dqqrb" event={"ID":"be86a67e-c663-4551-9ecf-a8c2a9801cd7","Type":"ContainerStarted","Data":"dce9592bfdda92b51cf47f5dcde92bdca3291f4109d5e58d2091f6ab07e5a4dd"}
Sep 30 09:52:23 crc kubenswrapper[4730]: I0930 09:52:23.433849 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-dqqrb" event={"ID":"be86a67e-c663-4551-9ecf-a8c2a9801cd7","Type":"ContainerStarted","Data":"970bf5e328f4517058f87939311578ce42fd389a7f805211f5cdaabb1990be97"}
Sep 30 09:52:23 crc kubenswrapper[4730]: I0930 09:52:23.436439 4730 generic.go:334] "Generic (PLEG): container finished" podID="cd78d8d1-8617-4c3d-9205-abd8bbdde710" containerID="038740d5913ebafff9735e66423a0b5a4547372d92a89e51d2a6ead3783ee6f1" exitCode=0
Sep 30 09:52:23 crc kubenswrapper[4730]: I0930 09:52:23.436542 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8qhvr" event={"ID":"cd78d8d1-8617-4c3d-9205-abd8bbdde710","Type":"ContainerDied","Data":"038740d5913ebafff9735e66423a0b5a4547372d92a89e51d2a6ead3783ee6f1"}
Sep 30 09:52:23 crc kubenswrapper[4730]: E0930 09:52:23.439425 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-v67gm" podUID="30fe799a-bbf1-4e62-8187-353da7ca9930"
Sep 30 09:52:23 crc kubenswrapper[4730]: I0930 09:52:23.516257 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-dqqrb" podStartSLOduration=166.516221379 podStartE2EDuration="2m46.516221379s" podCreationTimestamp="2025-09-30 09:49:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 09:52:23.503591079 +0000 UTC m=+187.836851092" watchObservedRunningTime="2025-09-30 09:52:23.516221379 +0000 UTC m=+187.849481372"
Sep 30 09:52:24 crc kubenswrapper[4730]: I0930 09:52:24.436000 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 30 09:52:24 crc kubenswrapper[4730]: I0930 09:52:24.447282 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8qhvr" event={"ID":"cd78d8d1-8617-4c3d-9205-abd8bbdde710","Type":"ContainerStarted","Data":"aa87a7828f88f9712358db9846d27c69240d9060ecb201c3a63761fa2827f7ef"}
Sep 30 09:52:24 crc kubenswrapper[4730]: I0930 09:52:24.451733 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fbd6s" event={"ID":"6fe7d185-5cd3-4de1-a842-065620d27fdf","Type":"ContainerStarted","Data":"fabe739ab29245e7f889556b17ed7e3ae04e17ffb336fc91c7fff81830deda51"}
Sep 30 09:52:24 crc kubenswrapper[4730]: I0930 09:52:24.495331 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-8qhvr" podStartSLOduration=3.20697291 podStartE2EDuration="41.495297014s" podCreationTimestamp="2025-09-30 09:51:43 +0000 UTC" firstStartedPulling="2025-09-30 09:51:45.763593744 +0000 UTC m=+150.096853737" lastFinishedPulling="2025-09-30 09:52:24.051917838 +0000 UTC m=+188.385177841" observedRunningTime="2025-09-30 09:52:24.492717312 +0000 UTC m=+188.825977305" watchObservedRunningTime="2025-09-30 09:52:24.495297014 +0000 UTC m=+188.828557017"
Sep 30 09:52:24 crc kubenswrapper[4730]: I0930 09:52:24.516337 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-fbd6s" podStartSLOduration=3.480655811 podStartE2EDuration="41.516321167s" podCreationTimestamp="2025-09-30 09:51:43 +0000 UTC" firstStartedPulling="2025-09-30 09:51:45.834781018 +0000 UTC m=+150.168041011" lastFinishedPulling="2025-09-30 09:52:23.870446374 +0000 UTC m=+188.203706367" observedRunningTime="2025-09-30 09:52:24.515006891 +0000 UTC m=+188.848266884" watchObservedRunningTime="2025-09-30 09:52:24.516321167 +0000 UTC m=+188.849581150"
Sep 30 09:52:32 crc kubenswrapper[4730]: I0930 09:52:32.336486 4730 patch_prober.go:28] interesting pod/machine-config-daemon-d4zf9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 30 09:52:32 crc kubenswrapper[4730]: I0930 09:52:32.336884 4730 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 30 09:52:33 crc kubenswrapper[4730]: I0930 09:52:33.679661 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-8qhvr"
Sep 30 09:52:33 crc kubenswrapper[4730]: I0930 09:52:33.679733 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-8qhvr"
Sep 30 09:52:34 crc kubenswrapper[4730]: I0930 09:52:34.087823 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-fbd6s"
Sep 30 09:52:34 crc kubenswrapper[4730]: I0930 09:52:34.087880 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-fbd6s"
Sep 30 09:52:34 crc kubenswrapper[4730]: I0930 09:52:34.207039 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-fbd6s"
Sep 30 09:52:34 crc kubenswrapper[4730]: I0930 09:52:34.207236 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-8qhvr"
Sep 30 09:52:34 crc kubenswrapper[4730]: I0930 09:52:34.519174 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5g8hq" event={"ID":"57333b74-aac2-4b1b-af18-e4dece554edb","Type":"ContainerStarted","Data":"023ff43605a0395a68aaa779ba5dc415d6df8a3a8d37858e4705dfaaca9dc805"}
Sep 30 09:52:34 crc kubenswrapper[4730]: I0930 09:52:34.563545 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-8qhvr"
Sep 30 09:52:34 crc kubenswrapper[4730]: I0930 09:52:34.565460 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-fbd6s"
Sep 30 09:52:35 crc kubenswrapper[4730]: I0930 09:52:35.527890 4730 generic.go:334] "Generic (PLEG): container finished" podID="b6d72e26-75bd-4095-aae3-345a816de053" containerID="3411de82bf4bc2eefe7a46dd6d190ce314bf23b314cbb1675296029bef44e649" exitCode=0
Sep 30 09:52:35 crc kubenswrapper[4730]: I0930 09:52:35.527962 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zsmr9" event={"ID":"b6d72e26-75bd-4095-aae3-345a816de053","Type":"ContainerDied","Data":"3411de82bf4bc2eefe7a46dd6d190ce314bf23b314cbb1675296029bef44e649"}
Sep 30 09:52:35 crc kubenswrapper[4730]: I0930 09:52:35.531896 4730 generic.go:334] "Generic (PLEG): container finished" podID="9ea0a817-d369-46fc-9e35-ab227abcbf25" containerID="111dfe08a8f5e9afa79eadf369cdea9b1d6a0d7b6ad7f96191c0e34c5aa149fb" exitCode=0
Sep 30 09:52:35 crc kubenswrapper[4730]: I0930 09:52:35.531986 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fnbq4" event={"ID":"9ea0a817-d369-46fc-9e35-ab227abcbf25","Type":"ContainerDied","Data":"111dfe08a8f5e9afa79eadf369cdea9b1d6a0d7b6ad7f96191c0e34c5aa149fb"}
Sep 30 09:52:35 crc kubenswrapper[4730]: I0930 09:52:35.535105 4730 generic.go:334] "Generic (PLEG): container finished" podID="57333b74-aac2-4b1b-af18-e4dece554edb" containerID="023ff43605a0395a68aaa779ba5dc415d6df8a3a8d37858e4705dfaaca9dc805" exitCode=0
Sep 30 09:52:35 crc kubenswrapper[4730]: I0930 09:52:35.535133 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5g8hq" event={"ID":"57333b74-aac2-4b1b-af18-e4dece554edb","Type":"ContainerDied","Data":"023ff43605a0395a68aaa779ba5dc415d6df8a3a8d37858e4705dfaaca9dc805"}
Sep 30 09:52:36 crc kubenswrapper[4730]: I0930 09:52:36.544997 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-drr8f" event={"ID":"57751804-fe40-4d95-a5b2-529037d3ba17","Type":"ContainerStarted","Data":"fdc364537b0a5cb838a7b83d0f14649acaa28c191fb76dac967641459c83adac"}
Sep 30 09:52:36 crc kubenswrapper[4730]: I0930 09:52:36.549271 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zsmr9" event={"ID":"b6d72e26-75bd-4095-aae3-345a816de053","Type":"ContainerStarted","Data":"02e67d19b5cf234a7b584a938cd4b5abb419b2f5e69d442bca39d303b889c33f"}
Sep 30 09:52:36 crc kubenswrapper[4730]: I0930 09:52:36.552669 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fnbq4" event={"ID":"9ea0a817-d369-46fc-9e35-ab227abcbf25","Type":"ContainerStarted","Data":"7a4872c1de0676f328202646734b87e601bf87a09db7059c0ce843233070c99d"}
Sep 30 09:52:36 crc kubenswrapper[4730]: I0930 09:52:36.556586 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5g8hq" event={"ID":"57333b74-aac2-4b1b-af18-e4dece554edb","Type":"ContainerStarted","Data":"ce3642118be30648d075448cc24aed002b9f6c61fba1bc57687097b763091ecc"}
Sep 30 09:52:36 crc kubenswrapper[4730]: I0930 09:52:36.636563 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-fnbq4" podStartSLOduration=3.172383771 podStartE2EDuration="53.636542838s" podCreationTimestamp="2025-09-30 09:51:43 +0000 UTC" firstStartedPulling="2025-09-30 09:51:45.741328776 +0000 UTC m=+150.074588769" lastFinishedPulling="2025-09-30 09:52:36.205487843 +0000 UTC m=+200.538747836" observedRunningTime="2025-09-30 09:52:36.61159371 +0000 UTC m=+200.944853703" watchObservedRunningTime="2025-09-30 09:52:36.636542838 +0000 UTC m=+200.969802831"
Sep 30 09:52:36 crc kubenswrapper[4730]: I0930 09:52:36.637244 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-5g8hq"
Sep 30 09:52:36 crc kubenswrapper[4730]: I0930 09:52:36.637298 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-5g8hq"
Sep 30 09:52:36 crc kubenswrapper[4730]: I0930 09:52:36.638648 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-zsmr9" podStartSLOduration=2.378138746 podStartE2EDuration="51.638636838s" podCreationTimestamp="2025-09-30 09:51:45 +0000 UTC" firstStartedPulling="2025-09-30 09:51:46.900241438 +0000 UTC m=+151.233501431" lastFinishedPulling="2025-09-30 09:52:36.16073953 +0000 UTC m=+200.493999523" observedRunningTime="2025-09-30 09:52:36.63290417 +0000 UTC m=+200.966164173" watchObservedRunningTime="2025-09-30 09:52:36.638636838 +0000 UTC m=+200.971896831"
Sep 30 09:52:36 crc kubenswrapper[4730]: I0930 09:52:36.662536 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-5g8hq" podStartSLOduration=2.324672598 podStartE2EDuration="50.66251991s" podCreationTimestamp="2025-09-30 09:51:46 +0000 UTC" firstStartedPulling="2025-09-30 09:51:47.942745082 +0000 UTC m=+152.276005065" lastFinishedPulling="2025-09-30 09:52:36.280592384 +0000 UTC m=+200.613852377" observedRunningTime="2025-09-30 09:52:36.657947001 +0000 UTC m=+200.991207004" watchObservedRunningTime="2025-09-30 09:52:36.66251991 +0000 UTC m=+200.995779903"
Sep 30 09:52:37 crc kubenswrapper[4730]: I0930 09:52:37.564124 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-v67gm" event={"ID":"30fe799a-bbf1-4e62-8187-353da7ca9930","Type":"ContainerStarted","Data":"ee10c0db76df313c801f4da0341b6ae7b4cfd5173c6fc0bbf9fff986a942cc3b"}
Sep 30 09:52:37 crc kubenswrapper[4730]: I0930 09:52:37.565901 4730 generic.go:334] "Generic (PLEG): container finished" podID="57751804-fe40-4d95-a5b2-529037d3ba17" containerID="fdc364537b0a5cb838a7b83d0f14649acaa28c191fb76dac967641459c83adac" exitCode=0
Sep 30 09:52:37 crc kubenswrapper[4730]: I0930 09:52:37.565982 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-drr8f" event={"ID":"57751804-fe40-4d95-a5b2-529037d3ba17","Type":"ContainerDied","Data":"fdc364537b0a5cb838a7b83d0f14649acaa28c191fb76dac967641459c83adac"}
Sep 30 09:52:37 crc kubenswrapper[4730]: I0930 09:52:37.568026 4730 generic.go:334] "Generic (PLEG): container finished" podID="297e4d9f-18c2-4e11-a89b-11df93cba4ef" containerID="2b249965378a3268bdf440753f9456189591ef744854c5a1637c9ed667704027" exitCode=0
Sep 30 09:52:37 crc kubenswrapper[4730]: I0930 09:52:37.568122 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8srlp" event={"ID":"297e4d9f-18c2-4e11-a89b-11df93cba4ef","Type":"ContainerDied","Data":"2b249965378a3268bdf440753f9456189591ef744854c5a1637c9ed667704027"}
Sep 30 09:52:37 crc kubenswrapper[4730]: I0930 09:52:37.678428 4730 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-5g8hq" podUID="57333b74-aac2-4b1b-af18-e4dece554edb" containerName="registry-server" probeResult="failure" output=<
Sep 30 09:52:37 crc kubenswrapper[4730]: timeout: failed to connect service ":50051" within 1s
Sep 30 09:52:37 crc kubenswrapper[4730]: >
Sep 30 09:52:38 crc kubenswrapper[4730]: I0930 09:52:38.576363 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-drr8f" event={"ID":"57751804-fe40-4d95-a5b2-529037d3ba17","Type":"ContainerStarted","Data":"7fcc79f48ff06aed0ce771e444c296a9ff68fc37a05c6403c0576b1b41e6f1bd"}
Sep 30 09:52:38 crc kubenswrapper[4730]: I0930 09:52:38.578993 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8srlp" event={"ID":"297e4d9f-18c2-4e11-a89b-11df93cba4ef","Type":"ContainerStarted","Data":"076200e1ceb190f3029eab12693d4076afbdea446ff459c308e7de87ad3331f3"}
Sep 30 09:52:38 crc kubenswrapper[4730]: I0930 09:52:38.580842 4730 generic.go:334] "Generic (PLEG): container finished" podID="30fe799a-bbf1-4e62-8187-353da7ca9930" containerID="ee10c0db76df313c801f4da0341b6ae7b4cfd5173c6fc0bbf9fff986a942cc3b" exitCode=0
Sep 30 09:52:38 crc kubenswrapper[4730]: I0930 09:52:38.580905 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-v67gm" event={"ID":"30fe799a-bbf1-4e62-8187-353da7ca9930","Type":"ContainerDied","Data":"ee10c0db76df313c801f4da0341b6ae7b4cfd5173c6fc0bbf9fff986a942cc3b"}
Sep 30 09:52:38 crc kubenswrapper[4730]: I0930 09:52:38.593863 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-drr8f" podStartSLOduration=3.506145536 podStartE2EDuration="52.593846017s" podCreationTimestamp="2025-09-30 09:51:46 +0000 UTC" firstStartedPulling="2025-09-30 09:51:48.995925922 +0000 UTC m=+153.329185915" lastFinishedPulling="2025-09-30 09:52:38.083626393 +0000 UTC m=+202.416886396" observedRunningTime="2025-09-30 09:52:38.59188742 +0000 UTC m=+202.925147413" watchObservedRunningTime="2025-09-30 09:52:38.593846017 +0000 UTC m=+202.927106010"
Sep 30 09:52:38 crc kubenswrapper[4730]: I0930 09:52:38.610036 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-8srlp" podStartSLOduration=2.30124451 podStartE2EDuration="53.610016044s" podCreationTimestamp="2025-09-30 09:51:45 +0000 UTC" firstStartedPulling="2025-09-30 09:51:46.90499492 +0000 UTC m=+151.238254913" lastFinishedPulling="2025-09-30 09:52:38.213766454 +0000 UTC m=+202.547026447" observedRunningTime="2025-09-30 09:52:38.606351707 +0000 UTC m=+202.939611710" watchObservedRunningTime="2025-09-30 09:52:38.610016044 +0000 UTC m=+202.943276047"
Sep 30 09:52:38 crc kubenswrapper[4730]: I0930 09:52:38.820916 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-fbd6s"]
Sep 30 09:52:38 crc kubenswrapper[4730]: I0930 09:52:38.821471 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-fbd6s" podUID="6fe7d185-5cd3-4de1-a842-065620d27fdf" containerName="registry-server" containerID="cri-o://fabe739ab29245e7f889556b17ed7e3ae04e17ffb336fc91c7fff81830deda51" gracePeriod=2
Sep 30 09:52:39 crc kubenswrapper[4730]: I0930 09:52:39.256573 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-fbd6s"
Sep 30 09:52:39 crc kubenswrapper[4730]: I0930 09:52:39.399763 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gl7dn\" (UniqueName: \"kubernetes.io/projected/6fe7d185-5cd3-4de1-a842-065620d27fdf-kube-api-access-gl7dn\") pod \"6fe7d185-5cd3-4de1-a842-065620d27fdf\" (UID: \"6fe7d185-5cd3-4de1-a842-065620d27fdf\") "
Sep 30 09:52:39 crc kubenswrapper[4730]: I0930 09:52:39.399891 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6fe7d185-5cd3-4de1-a842-065620d27fdf-catalog-content\") pod \"6fe7d185-5cd3-4de1-a842-065620d27fdf\" (UID: \"6fe7d185-5cd3-4de1-a842-065620d27fdf\") "
Sep 30 09:52:39 crc kubenswrapper[4730]: I0930 09:52:39.399942 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6fe7d185-5cd3-4de1-a842-065620d27fdf-utilities\") pod \"6fe7d185-5cd3-4de1-a842-065620d27fdf\" (UID: \"6fe7d185-5cd3-4de1-a842-065620d27fdf\") "
Sep 30 09:52:39 crc kubenswrapper[4730]: I0930 09:52:39.400977 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6fe7d185-5cd3-4de1-a842-065620d27fdf-utilities" (OuterVolumeSpecName: "utilities") pod "6fe7d185-5cd3-4de1-a842-065620d27fdf" (UID: "6fe7d185-5cd3-4de1-a842-065620d27fdf"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 09:52:39 crc kubenswrapper[4730]: I0930 09:52:39.408306 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6fe7d185-5cd3-4de1-a842-065620d27fdf-kube-api-access-gl7dn" (OuterVolumeSpecName: "kube-api-access-gl7dn") pod "6fe7d185-5cd3-4de1-a842-065620d27fdf" (UID: "6fe7d185-5cd3-4de1-a842-065620d27fdf"). InnerVolumeSpecName "kube-api-access-gl7dn". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 09:52:39 crc kubenswrapper[4730]: I0930 09:52:39.446598 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6fe7d185-5cd3-4de1-a842-065620d27fdf-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6fe7d185-5cd3-4de1-a842-065620d27fdf" (UID: "6fe7d185-5cd3-4de1-a842-065620d27fdf"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 09:52:39 crc kubenswrapper[4730]: I0930 09:52:39.501784 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gl7dn\" (UniqueName: \"kubernetes.io/projected/6fe7d185-5cd3-4de1-a842-065620d27fdf-kube-api-access-gl7dn\") on node \"crc\" DevicePath \"\""
Sep 30 09:52:39 crc kubenswrapper[4730]: I0930 09:52:39.502004 4730 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6fe7d185-5cd3-4de1-a842-065620d27fdf-catalog-content\") on node \"crc\" DevicePath \"\""
Sep 30 09:52:39 crc kubenswrapper[4730]: I0930 09:52:39.502016 4730 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6fe7d185-5cd3-4de1-a842-065620d27fdf-utilities\") on node \"crc\" DevicePath \"\""
Sep 30 09:52:39 crc kubenswrapper[4730]: I0930 09:52:39.587672 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-v67gm" event={"ID":"30fe799a-bbf1-4e62-8187-353da7ca9930","Type":"ContainerStarted","Data":"86ac70a329c999e6e54dec26aa8022001bd78f53a70c6b0304b41ffcda18e6af"}
Sep 30 09:52:39 crc kubenswrapper[4730]: I0930 09:52:39.591145 4730 generic.go:334] "Generic (PLEG): container finished" podID="6fe7d185-5cd3-4de1-a842-065620d27fdf" containerID="fabe739ab29245e7f889556b17ed7e3ae04e17ffb336fc91c7fff81830deda51" exitCode=0
Sep 30 09:52:39 crc kubenswrapper[4730]: I0930 09:52:39.591177 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fbd6s" event={"ID":"6fe7d185-5cd3-4de1-a842-065620d27fdf","Type":"ContainerDied","Data":"fabe739ab29245e7f889556b17ed7e3ae04e17ffb336fc91c7fff81830deda51"}
Sep 30 09:52:39 crc kubenswrapper[4730]: I0930 09:52:39.591196 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fbd6s" event={"ID":"6fe7d185-5cd3-4de1-a842-065620d27fdf","Type":"ContainerDied","Data":"caf4527daaf45ed1b9844bb9c959a04ded47e26b11052061b104e0f0732f4606"}
Sep 30 09:52:39 crc kubenswrapper[4730]: I0930 09:52:39.591213 4730 scope.go:117] "RemoveContainer" containerID="fabe739ab29245e7f889556b17ed7e3ae04e17ffb336fc91c7fff81830deda51"
Sep 30 09:52:39 crc kubenswrapper[4730]: I0930 09:52:39.591288 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-fbd6s"
Sep 30 09:52:39 crc kubenswrapper[4730]: I0930 09:52:39.623345 4730 scope.go:117] "RemoveContainer" containerID="cbc86d450df989e9ee9ffb6000f3fd1106310a79888f1178008d101ab6c7acc7"
Sep 30 09:52:39 crc kubenswrapper[4730]: I0930 09:52:39.623569 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-v67gm" podStartSLOduration=3.340452334 podStartE2EDuration="56.623550775s" podCreationTimestamp="2025-09-30 09:51:43 +0000 UTC" firstStartedPulling="2025-09-30 09:51:45.737128319 +0000 UTC m=+150.070388312" lastFinishedPulling="2025-09-30 09:52:39.02022676 +0000 UTC m=+203.353486753" observedRunningTime="2025-09-30 09:52:39.62249353 +0000 UTC m=+203.955753543" watchObservedRunningTime="2025-09-30 09:52:39.623550775 +0000 UTC m=+203.956810768"
Sep 30 09:52:39 crc kubenswrapper[4730]: I0930 09:52:39.642735 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-fbd6s"]
Sep 30 09:52:39 crc kubenswrapper[4730]: I0930 09:52:39.643726 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-fbd6s"]
Sep 30 09:52:39 crc kubenswrapper[4730]: I0930 09:52:39.650524 4730 scope.go:117] "RemoveContainer" containerID="c7192a5752bbec093162bf63e7301f5c41386e30115e40bfe58dafe0969d70b2"
Sep 30 09:52:39 crc kubenswrapper[4730]: I0930 09:52:39.663567 4730 scope.go:117] "RemoveContainer" containerID="fabe739ab29245e7f889556b17ed7e3ae04e17ffb336fc91c7fff81830deda51"
Sep 30 09:52:39 crc kubenswrapper[4730]: E0930 09:52:39.664018 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fabe739ab29245e7f889556b17ed7e3ae04e17ffb336fc91c7fff81830deda51\": container with ID starting with fabe739ab29245e7f889556b17ed7e3ae04e17ffb336fc91c7fff81830deda51 not found: ID does not exist" containerID="fabe739ab29245e7f889556b17ed7e3ae04e17ffb336fc91c7fff81830deda51"
Sep 30 09:52:39 crc kubenswrapper[4730]: I0930 09:52:39.664081 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fabe739ab29245e7f889556b17ed7e3ae04e17ffb336fc91c7fff81830deda51"} err="failed to get container status \"fabe739ab29245e7f889556b17ed7e3ae04e17ffb336fc91c7fff81830deda51\": rpc error: code = NotFound desc = could not find container \"fabe739ab29245e7f889556b17ed7e3ae04e17ffb336fc91c7fff81830deda51\": container with ID starting with fabe739ab29245e7f889556b17ed7e3ae04e17ffb336fc91c7fff81830deda51 not found: ID does not exist"
Sep 30 09:52:39 crc kubenswrapper[4730]: I0930 09:52:39.664131 4730 scope.go:117] "RemoveContainer" containerID="cbc86d450df989e9ee9ffb6000f3fd1106310a79888f1178008d101ab6c7acc7"
Sep 30 09:52:39 crc kubenswrapper[4730]: E0930 09:52:39.664440 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cbc86d450df989e9ee9ffb6000f3fd1106310a79888f1178008d101ab6c7acc7\": container with ID starting with cbc86d450df989e9ee9ffb6000f3fd1106310a79888f1178008d101ab6c7acc7 not found: ID does not exist" containerID="cbc86d450df989e9ee9ffb6000f3fd1106310a79888f1178008d101ab6c7acc7"
Sep 30 09:52:39 crc kubenswrapper[4730]: I0930 09:52:39.664470 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cbc86d450df989e9ee9ffb6000f3fd1106310a79888f1178008d101ab6c7acc7"} err="failed to get container status \"cbc86d450df989e9ee9ffb6000f3fd1106310a79888f1178008d101ab6c7acc7\": rpc error: code = NotFound desc = could not find container \"cbc86d450df989e9ee9ffb6000f3fd1106310a79888f1178008d101ab6c7acc7\": container with ID starting with cbc86d450df989e9ee9ffb6000f3fd1106310a79888f1178008d101ab6c7acc7 not found: ID does not exist"
Sep 30 09:52:39 crc kubenswrapper[4730]: I0930 09:52:39.664494 4730 scope.go:117] "RemoveContainer" containerID="c7192a5752bbec093162bf63e7301f5c41386e30115e40bfe58dafe0969d70b2"
Sep 30 09:52:39 crc kubenswrapper[4730]: E0930 09:52:39.664767 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c7192a5752bbec093162bf63e7301f5c41386e30115e40bfe58dafe0969d70b2\": container with ID starting with c7192a5752bbec093162bf63e7301f5c41386e30115e40bfe58dafe0969d70b2 not found: ID does not exist" containerID="c7192a5752bbec093162bf63e7301f5c41386e30115e40bfe58dafe0969d70b2"
Sep 30 09:52:39 crc kubenswrapper[4730]: I0930 09:52:39.664797 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c7192a5752bbec093162bf63e7301f5c41386e30115e40bfe58dafe0969d70b2"} err="failed to get container status \"c7192a5752bbec093162bf63e7301f5c41386e30115e40bfe58dafe0969d70b2\": rpc error: code = NotFound desc = could not find container \"c7192a5752bbec093162bf63e7301f5c41386e30115e40bfe58dafe0969d70b2\": container with ID starting with c7192a5752bbec093162bf63e7301f5c41386e30115e40bfe58dafe0969d70b2 not found: ID does not exist"
Sep 30 09:52:40 crc kubenswrapper[4730]: I0930 09:52:40.388551 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6fe7d185-5cd3-4de1-a842-065620d27fdf" path="/var/lib/kubelet/pods/6fe7d185-5cd3-4de1-a842-065620d27fdf/volumes"
Sep 30 09:52:43 crc kubenswrapper[4730]: I0930 09:52:43.576785 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-fnbq4"
Sep 30 09:52:43 crc kubenswrapper[4730]: I0930 09:52:43.578103 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-fnbq4"
Sep 30 09:52:43 crc kubenswrapper[4730]: I0930 09:52:43.628029 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-fnbq4"
Sep 30 09:52:43 crc kubenswrapper[4730]: I0930 09:52:43.907437 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-v67gm"
Sep 30 09:52:43 crc kubenswrapper[4730]: I0930 09:52:43.907826 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-v67gm"
Sep 30 09:52:43 crc kubenswrapper[4730]: I0930 09:52:43.944936 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-v67gm"
Sep 30 09:52:44 crc kubenswrapper[4730]: I0930 09:52:44.665723 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-v67gm"
Sep 30 09:52:44 crc kubenswrapper[4730]: I0930 09:52:44.682724 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-fnbq4"
Sep 30 09:52:45 crc kubenswrapper[4730]: I0930 09:52:45.802520 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-8srlp"
Sep 30 09:52:45 crc kubenswrapper[4730]: I0930 09:52:45.802839 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-8srlp"
kubenswrapper[4730]: I0930 09:52:45.802839 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-8srlp" Sep 30 09:52:45 crc kubenswrapper[4730]: I0930 09:52:45.820973 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-v67gm"] Sep 30 09:52:45 crc kubenswrapper[4730]: I0930 09:52:45.852564 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-8srlp" Sep 30 09:52:46 crc kubenswrapper[4730]: I0930 09:52:46.097689 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-zsmr9" Sep 30 09:52:46 crc kubenswrapper[4730]: I0930 09:52:46.098057 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-zsmr9" Sep 30 09:52:46 crc kubenswrapper[4730]: I0930 09:52:46.143960 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-zsmr9" Sep 30 09:52:46 crc kubenswrapper[4730]: I0930 09:52:46.535240 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-r8tph"] Sep 30 09:52:46 crc kubenswrapper[4730]: I0930 09:52:46.632975 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-v67gm" podUID="30fe799a-bbf1-4e62-8187-353da7ca9930" containerName="registry-server" containerID="cri-o://86ac70a329c999e6e54dec26aa8022001bd78f53a70c6b0304b41ffcda18e6af" gracePeriod=2 Sep 30 09:52:46 crc kubenswrapper[4730]: I0930 09:52:46.682322 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-zsmr9" Sep 30 09:52:46 crc kubenswrapper[4730]: I0930 09:52:46.685326 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-5g8hq" Sep 30 09:52:46 crc kubenswrapper[4730]: I0930 09:52:46.689357 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-8srlp" Sep 30 09:52:46 crc kubenswrapper[4730]: I0930 09:52:46.733686 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-5g8hq" Sep 30 09:52:47 crc kubenswrapper[4730]: I0930 09:52:47.086499 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-drr8f" Sep 30 09:52:47 crc kubenswrapper[4730]: I0930 09:52:47.086562 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-drr8f" Sep 30 09:52:47 crc kubenswrapper[4730]: I0930 09:52:47.125912 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-drr8f" Sep 30 09:52:47 crc kubenswrapper[4730]: I0930 09:52:47.644004 4730 generic.go:334] "Generic (PLEG): container finished" podID="30fe799a-bbf1-4e62-8187-353da7ca9930" containerID="86ac70a329c999e6e54dec26aa8022001bd78f53a70c6b0304b41ffcda18e6af" exitCode=0 Sep 30 09:52:47 crc kubenswrapper[4730]: I0930 09:52:47.644089 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-v67gm" event={"ID":"30fe799a-bbf1-4e62-8187-353da7ca9930","Type":"ContainerDied","Data":"86ac70a329c999e6e54dec26aa8022001bd78f53a70c6b0304b41ffcda18e6af"} Sep 
30 09:52:47 crc kubenswrapper[4730]: I0930 09:52:47.687842 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-v67gm" Sep 30 09:52:47 crc kubenswrapper[4730]: I0930 09:52:47.719373 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-drr8f" Sep 30 09:52:47 crc kubenswrapper[4730]: I0930 09:52:47.804461 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/30fe799a-bbf1-4e62-8187-353da7ca9930-catalog-content\") pod \"30fe799a-bbf1-4e62-8187-353da7ca9930\" (UID: \"30fe799a-bbf1-4e62-8187-353da7ca9930\") " Sep 30 09:52:47 crc kubenswrapper[4730]: I0930 09:52:47.804548 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xjprn\" (UniqueName: \"kubernetes.io/projected/30fe799a-bbf1-4e62-8187-353da7ca9930-kube-api-access-xjprn\") pod \"30fe799a-bbf1-4e62-8187-353da7ca9930\" (UID: \"30fe799a-bbf1-4e62-8187-353da7ca9930\") " Sep 30 09:52:47 crc kubenswrapper[4730]: I0930 09:52:47.804759 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/30fe799a-bbf1-4e62-8187-353da7ca9930-utilities\") pod \"30fe799a-bbf1-4e62-8187-353da7ca9930\" (UID: \"30fe799a-bbf1-4e62-8187-353da7ca9930\") " Sep 30 09:52:47 crc kubenswrapper[4730]: I0930 09:52:47.805666 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/30fe799a-bbf1-4e62-8187-353da7ca9930-utilities" (OuterVolumeSpecName: "utilities") pod "30fe799a-bbf1-4e62-8187-353da7ca9930" (UID: "30fe799a-bbf1-4e62-8187-353da7ca9930"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 09:52:47 crc kubenswrapper[4730]: I0930 09:52:47.810019 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/30fe799a-bbf1-4e62-8187-353da7ca9930-kube-api-access-xjprn" (OuterVolumeSpecName: "kube-api-access-xjprn") pod "30fe799a-bbf1-4e62-8187-353da7ca9930" (UID: "30fe799a-bbf1-4e62-8187-353da7ca9930"). InnerVolumeSpecName "kube-api-access-xjprn". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 09:52:47 crc kubenswrapper[4730]: I0930 09:52:47.880728 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/30fe799a-bbf1-4e62-8187-353da7ca9930-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "30fe799a-bbf1-4e62-8187-353da7ca9930" (UID: "30fe799a-bbf1-4e62-8187-353da7ca9930"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 09:52:47 crc kubenswrapper[4730]: I0930 09:52:47.905647 4730 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/30fe799a-bbf1-4e62-8187-353da7ca9930-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 09:52:47 crc kubenswrapper[4730]: I0930 09:52:47.905673 4730 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/30fe799a-bbf1-4e62-8187-353da7ca9930-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 09:52:47 crc kubenswrapper[4730]: I0930 09:52:47.905685 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xjprn\" (UniqueName: \"kubernetes.io/projected/30fe799a-bbf1-4e62-8187-353da7ca9930-kube-api-access-xjprn\") on node \"crc\" DevicePath \"\"" Sep 30 09:52:48 crc kubenswrapper[4730]: I0930 09:52:48.022974 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-zsmr9"] Sep 30 09:52:48 crc kubenswrapper[4730]: I0930 09:52:48.653222 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-v67gm" event={"ID":"30fe799a-bbf1-4e62-8187-353da7ca9930","Type":"ContainerDied","Data":"121b681cbb69bb5363a0405cfc095365c6bc659202242b2c3eb65d7ce453c42d"} Sep 30 09:52:48 crc kubenswrapper[4730]: I0930 09:52:48.653504 4730 scope.go:117] "RemoveContainer" containerID="86ac70a329c999e6e54dec26aa8022001bd78f53a70c6b0304b41ffcda18e6af" Sep 30 09:52:48 crc kubenswrapper[4730]: I0930 09:52:48.653320 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-v67gm" Sep 30 09:52:48 crc kubenswrapper[4730]: I0930 09:52:48.670204 4730 scope.go:117] "RemoveContainer" containerID="ee10c0db76df313c801f4da0341b6ae7b4cfd5173c6fc0bbf9fff986a942cc3b" Sep 30 09:52:48 crc kubenswrapper[4730]: I0930 09:52:48.687188 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-v67gm"] Sep 30 09:52:48 crc kubenswrapper[4730]: I0930 09:52:48.687293 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-v67gm"] Sep 30 09:52:48 crc kubenswrapper[4730]: I0930 09:52:48.691466 4730 scope.go:117] "RemoveContainer" containerID="ff3d5848c16585363eb437317caaaa90cfcbd997cb68b0c1f33a8d0a61419652" Sep 30 09:52:49 crc kubenswrapper[4730]: I0930 09:52:49.659921 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-zsmr9" podUID="b6d72e26-75bd-4095-aae3-345a816de053" containerName="registry-server" containerID="cri-o://02e67d19b5cf234a7b584a938cd4b5abb419b2f5e69d442bca39d303b889c33f" gracePeriod=2 Sep 30 09:52:50 crc kubenswrapper[4730]: I0930 09:52:50.010117 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-zsmr9" Sep 30 09:52:50 crc kubenswrapper[4730]: I0930 09:52:50.142336 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b6d72e26-75bd-4095-aae3-345a816de053-catalog-content\") pod \"b6d72e26-75bd-4095-aae3-345a816de053\" (UID: \"b6d72e26-75bd-4095-aae3-345a816de053\") " Sep 30 09:52:50 crc kubenswrapper[4730]: I0930 09:52:50.142443 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b6d72e26-75bd-4095-aae3-345a816de053-utilities\") pod \"b6d72e26-75bd-4095-aae3-345a816de053\" (UID: \"b6d72e26-75bd-4095-aae3-345a816de053\") " Sep 30 09:52:50 crc kubenswrapper[4730]: I0930 09:52:50.142516 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lv4g8\" (UniqueName: \"kubernetes.io/projected/b6d72e26-75bd-4095-aae3-345a816de053-kube-api-access-lv4g8\") pod \"b6d72e26-75bd-4095-aae3-345a816de053\" (UID: \"b6d72e26-75bd-4095-aae3-345a816de053\") " Sep 30 09:52:50 crc kubenswrapper[4730]: I0930 09:52:50.143129 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b6d72e26-75bd-4095-aae3-345a816de053-utilities" (OuterVolumeSpecName: "utilities") pod "b6d72e26-75bd-4095-aae3-345a816de053" (UID: "b6d72e26-75bd-4095-aae3-345a816de053"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 09:52:50 crc kubenswrapper[4730]: I0930 09:52:50.150703 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6d72e26-75bd-4095-aae3-345a816de053-kube-api-access-lv4g8" (OuterVolumeSpecName: "kube-api-access-lv4g8") pod "b6d72e26-75bd-4095-aae3-345a816de053" (UID: "b6d72e26-75bd-4095-aae3-345a816de053"). InnerVolumeSpecName "kube-api-access-lv4g8". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 09:52:50 crc kubenswrapper[4730]: I0930 09:52:50.154412 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b6d72e26-75bd-4095-aae3-345a816de053-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b6d72e26-75bd-4095-aae3-345a816de053" (UID: "b6d72e26-75bd-4095-aae3-345a816de053"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 09:52:50 crc kubenswrapper[4730]: I0930 09:52:50.222129 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-drr8f"] Sep 30 09:52:50 crc kubenswrapper[4730]: I0930 09:52:50.222355 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-drr8f" podUID="57751804-fe40-4d95-a5b2-529037d3ba17" containerName="registry-server" containerID="cri-o://7fcc79f48ff06aed0ce771e444c296a9ff68fc37a05c6403c0576b1b41e6f1bd" gracePeriod=2 Sep 30 09:52:50 crc kubenswrapper[4730]: I0930 09:52:50.244826 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lv4g8\" (UniqueName: \"kubernetes.io/projected/b6d72e26-75bd-4095-aae3-345a816de053-kube-api-access-lv4g8\") on node \"crc\" DevicePath \"\"" Sep 30 09:52:50 crc kubenswrapper[4730]: I0930 09:52:50.244890 4730 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b6d72e26-75bd-4095-aae3-345a816de053-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 09:52:50 crc kubenswrapper[4730]: I0930 09:52:50.244905 4730 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b6d72e26-75bd-4095-aae3-345a816de053-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 09:52:50 crc kubenswrapper[4730]: I0930 09:52:50.386678 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="30fe799a-bbf1-4e62-8187-353da7ca9930" path="/var/lib/kubelet/pods/30fe799a-bbf1-4e62-8187-353da7ca9930/volumes" Sep 30 09:52:50 crc kubenswrapper[4730]: I0930 09:52:50.595599 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-drr8f" Sep 30 09:52:50 crc kubenswrapper[4730]: I0930 09:52:50.666107 4730 generic.go:334] "Generic (PLEG): container finished" podID="b6d72e26-75bd-4095-aae3-345a816de053" containerID="02e67d19b5cf234a7b584a938cd4b5abb419b2f5e69d442bca39d303b889c33f" exitCode=0 Sep 30 09:52:50 crc kubenswrapper[4730]: I0930 09:52:50.666179 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zsmr9" event={"ID":"b6d72e26-75bd-4095-aae3-345a816de053","Type":"ContainerDied","Data":"02e67d19b5cf234a7b584a938cd4b5abb419b2f5e69d442bca39d303b889c33f"} Sep 30 09:52:50 crc kubenswrapper[4730]: I0930 09:52:50.666237 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-zsmr9" Sep 30 09:52:50 crc kubenswrapper[4730]: I0930 09:52:50.666274 4730 scope.go:117] "RemoveContainer" containerID="02e67d19b5cf234a7b584a938cd4b5abb419b2f5e69d442bca39d303b889c33f" Sep 30 09:52:50 crc kubenswrapper[4730]: I0930 09:52:50.666258 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zsmr9" event={"ID":"b6d72e26-75bd-4095-aae3-345a816de053","Type":"ContainerDied","Data":"49f0e8db32ba925ba6d847e5c36be1bf05bb171caa8ef6dda7eaacad1f750081"} Sep 30 09:52:50 crc kubenswrapper[4730]: I0930 09:52:50.670095 4730 generic.go:334] "Generic (PLEG): container finished" podID="57751804-fe40-4d95-a5b2-529037d3ba17" containerID="7fcc79f48ff06aed0ce771e444c296a9ff68fc37a05c6403c0576b1b41e6f1bd" exitCode=0 Sep 30 09:52:50 crc kubenswrapper[4730]: I0930 09:52:50.670132 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-drr8f" event={"ID":"57751804-fe40-4d95-a5b2-529037d3ba17","Type":"ContainerDied","Data":"7fcc79f48ff06aed0ce771e444c296a9ff68fc37a05c6403c0576b1b41e6f1bd"} Sep 30 09:52:50 crc kubenswrapper[4730]: I0930 09:52:50.670156 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-drr8f" event={"ID":"57751804-fe40-4d95-a5b2-529037d3ba17","Type":"ContainerDied","Data":"cff2d873c4c0adabe867feb666f6c6e7899eaa4baa05dff2864d870effe64416"} Sep 30 09:52:50 crc kubenswrapper[4730]: I0930 09:52:50.670218 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-drr8f" Sep 30 09:52:50 crc kubenswrapper[4730]: I0930 09:52:50.686283 4730 scope.go:117] "RemoveContainer" containerID="3411de82bf4bc2eefe7a46dd6d190ce314bf23b314cbb1675296029bef44e649" Sep 30 09:52:50 crc kubenswrapper[4730]: I0930 09:52:50.689728 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-zsmr9"] Sep 30 09:52:50 crc kubenswrapper[4730]: I0930 09:52:50.693755 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-zsmr9"] Sep 30 09:52:50 crc kubenswrapper[4730]: I0930 09:52:50.701402 4730 scope.go:117] "RemoveContainer" containerID="917c2735a5940773083e97852550f4198cf6df8bae127c4849cfb8c9c0cdde7c" Sep 30 09:52:50 crc kubenswrapper[4730]: I0930 09:52:50.714286 4730 scope.go:117] "RemoveContainer" containerID="02e67d19b5cf234a7b584a938cd4b5abb419b2f5e69d442bca39d303b889c33f" Sep 30 09:52:50 crc kubenswrapper[4730]: E0930 09:52:50.714723 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"02e67d19b5cf234a7b584a938cd4b5abb419b2f5e69d442bca39d303b889c33f\": container with ID starting with 02e67d19b5cf234a7b584a938cd4b5abb419b2f5e69d442bca39d303b889c33f not found: ID does not exist" containerID="02e67d19b5cf234a7b584a938cd4b5abb419b2f5e69d442bca39d303b889c33f" Sep 30 09:52:50 crc kubenswrapper[4730]: I0930 09:52:50.714776 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"02e67d19b5cf234a7b584a938cd4b5abb419b2f5e69d442bca39d303b889c33f"} err="failed to get container status \"02e67d19b5cf234a7b584a938cd4b5abb419b2f5e69d442bca39d303b889c33f\": rpc error: code = NotFound desc = could not find container \"02e67d19b5cf234a7b584a938cd4b5abb419b2f5e69d442bca39d303b889c33f\": container with ID starting with 
02e67d19b5cf234a7b584a938cd4b5abb419b2f5e69d442bca39d303b889c33f not found: ID does not exist" Sep 30 09:52:50 crc kubenswrapper[4730]: I0930 09:52:50.714809 4730 scope.go:117] "RemoveContainer" containerID="3411de82bf4bc2eefe7a46dd6d190ce314bf23b314cbb1675296029bef44e649" Sep 30 09:52:50 crc kubenswrapper[4730]: E0930 09:52:50.715300 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3411de82bf4bc2eefe7a46dd6d190ce314bf23b314cbb1675296029bef44e649\": container with ID starting with 3411de82bf4bc2eefe7a46dd6d190ce314bf23b314cbb1675296029bef44e649 not found: ID does not exist" containerID="3411de82bf4bc2eefe7a46dd6d190ce314bf23b314cbb1675296029bef44e649" Sep 30 09:52:50 crc kubenswrapper[4730]: I0930 09:52:50.715357 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3411de82bf4bc2eefe7a46dd6d190ce314bf23b314cbb1675296029bef44e649"} err="failed to get container status \"3411de82bf4bc2eefe7a46dd6d190ce314bf23b314cbb1675296029bef44e649\": rpc error: code = NotFound desc = could not find container \"3411de82bf4bc2eefe7a46dd6d190ce314bf23b314cbb1675296029bef44e649\": container with ID starting with 3411de82bf4bc2eefe7a46dd6d190ce314bf23b314cbb1675296029bef44e649 not found: ID does not exist" Sep 30 09:52:50 crc kubenswrapper[4730]: I0930 09:52:50.715404 4730 scope.go:117] "RemoveContainer" containerID="917c2735a5940773083e97852550f4198cf6df8bae127c4849cfb8c9c0cdde7c" Sep 30 09:52:50 crc kubenswrapper[4730]: E0930 09:52:50.715784 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"917c2735a5940773083e97852550f4198cf6df8bae127c4849cfb8c9c0cdde7c\": container with ID starting with 917c2735a5940773083e97852550f4198cf6df8bae127c4849cfb8c9c0cdde7c not found: ID does not exist" containerID="917c2735a5940773083e97852550f4198cf6df8bae127c4849cfb8c9c0cdde7c" Sep 30 09:52:50 crc kubenswrapper[4730]: I0930 09:52:50.715825 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"917c2735a5940773083e97852550f4198cf6df8bae127c4849cfb8c9c0cdde7c"} err="failed to get container status \"917c2735a5940773083e97852550f4198cf6df8bae127c4849cfb8c9c0cdde7c\": rpc error: code = NotFound desc = could not find container \"917c2735a5940773083e97852550f4198cf6df8bae127c4849cfb8c9c0cdde7c\": container with ID starting with 917c2735a5940773083e97852550f4198cf6df8bae127c4849cfb8c9c0cdde7c not found: ID does not exist" Sep 30 09:52:50 crc kubenswrapper[4730]: I0930 09:52:50.715846 4730 scope.go:117] "RemoveContainer" containerID="7fcc79f48ff06aed0ce771e444c296a9ff68fc37a05c6403c0576b1b41e6f1bd" Sep 30 09:52:50 crc kubenswrapper[4730]: I0930 09:52:50.728413 4730 scope.go:117] "RemoveContainer" containerID="fdc364537b0a5cb838a7b83d0f14649acaa28c191fb76dac967641459c83adac" Sep 30 09:52:50 crc kubenswrapper[4730]: I0930 09:52:50.743500 4730 scope.go:117] "RemoveContainer" containerID="ace0bf4216c56b67458dd27514f850e9c4ca94d3fa487db289f01f806982fd04" Sep 30 09:52:50 crc kubenswrapper[4730]: I0930 09:52:50.750602 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57751804-fe40-4d95-a5b2-529037d3ba17-utilities\") pod \"57751804-fe40-4d95-a5b2-529037d3ba17\" (UID: \"57751804-fe40-4d95-a5b2-529037d3ba17\") " Sep 30 09:52:50 crc kubenswrapper[4730]: I0930 09:52:50.750696 4730 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"kube-api-access-lvwfp\" (UniqueName: \"kubernetes.io/projected/57751804-fe40-4d95-a5b2-529037d3ba17-kube-api-access-lvwfp\") pod \"57751804-fe40-4d95-a5b2-529037d3ba17\" (UID: \"57751804-fe40-4d95-a5b2-529037d3ba17\") " Sep 30 09:52:50 crc kubenswrapper[4730]: I0930 09:52:50.750761 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57751804-fe40-4d95-a5b2-529037d3ba17-catalog-content\") pod \"57751804-fe40-4d95-a5b2-529037d3ba17\" (UID: \"57751804-fe40-4d95-a5b2-529037d3ba17\") " Sep 30 09:52:50 crc kubenswrapper[4730]: I0930 09:52:50.752113 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57751804-fe40-4d95-a5b2-529037d3ba17-utilities" (OuterVolumeSpecName: "utilities") pod "57751804-fe40-4d95-a5b2-529037d3ba17" (UID: "57751804-fe40-4d95-a5b2-529037d3ba17"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 09:52:50 crc kubenswrapper[4730]: I0930 09:52:50.754492 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57751804-fe40-4d95-a5b2-529037d3ba17-kube-api-access-lvwfp" (OuterVolumeSpecName: "kube-api-access-lvwfp") pod "57751804-fe40-4d95-a5b2-529037d3ba17" (UID: "57751804-fe40-4d95-a5b2-529037d3ba17"). InnerVolumeSpecName "kube-api-access-lvwfp". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 09:52:50 crc kubenswrapper[4730]: I0930 09:52:50.759970 4730 scope.go:117] "RemoveContainer" containerID="7fcc79f48ff06aed0ce771e444c296a9ff68fc37a05c6403c0576b1b41e6f1bd" Sep 30 09:52:50 crc kubenswrapper[4730]: E0930 09:52:50.760646 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7fcc79f48ff06aed0ce771e444c296a9ff68fc37a05c6403c0576b1b41e6f1bd\": container with ID starting with 7fcc79f48ff06aed0ce771e444c296a9ff68fc37a05c6403c0576b1b41e6f1bd not found: ID does not exist" containerID="7fcc79f48ff06aed0ce771e444c296a9ff68fc37a05c6403c0576b1b41e6f1bd" Sep 30 09:52:50 crc kubenswrapper[4730]: I0930 09:52:50.760715 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7fcc79f48ff06aed0ce771e444c296a9ff68fc37a05c6403c0576b1b41e6f1bd"} err="failed to get container status \"7fcc79f48ff06aed0ce771e444c296a9ff68fc37a05c6403c0576b1b41e6f1bd\": rpc error: code = NotFound desc = could not find container \"7fcc79f48ff06aed0ce771e444c296a9ff68fc37a05c6403c0576b1b41e6f1bd\": container with ID starting with 7fcc79f48ff06aed0ce771e444c296a9ff68fc37a05c6403c0576b1b41e6f1bd not found: ID does not exist" Sep 30 09:52:50 crc kubenswrapper[4730]: I0930 09:52:50.760755 4730 scope.go:117] "RemoveContainer" containerID="fdc364537b0a5cb838a7b83d0f14649acaa28c191fb76dac967641459c83adac" Sep 30 09:52:50 crc kubenswrapper[4730]: E0930 09:52:50.761160 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fdc364537b0a5cb838a7b83d0f14649acaa28c191fb76dac967641459c83adac\": container with ID starting with fdc364537b0a5cb838a7b83d0f14649acaa28c191fb76dac967641459c83adac not found: ID does not exist" containerID="fdc364537b0a5cb838a7b83d0f14649acaa28c191fb76dac967641459c83adac" Sep 30 09:52:50 crc kubenswrapper[4730]: I0930 09:52:50.761217 4730 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"fdc364537b0a5cb838a7b83d0f14649acaa28c191fb76dac967641459c83adac"} err="failed to get container status \"fdc364537b0a5cb838a7b83d0f14649acaa28c191fb76dac967641459c83adac\": rpc error: code = NotFound desc = could not find container \"fdc364537b0a5cb838a7b83d0f14649acaa28c191fb76dac967641459c83adac\": container with ID starting with fdc364537b0a5cb838a7b83d0f14649acaa28c191fb76dac967641459c83adac not found: ID does not exist" Sep 30 09:52:50 crc kubenswrapper[4730]: I0930 09:52:50.761257 4730 scope.go:117] "RemoveContainer" containerID="ace0bf4216c56b67458dd27514f850e9c4ca94d3fa487db289f01f806982fd04" Sep 30 09:52:50 crc kubenswrapper[4730]: E0930 09:52:50.761627 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ace0bf4216c56b67458dd27514f850e9c4ca94d3fa487db289f01f806982fd04\": container with ID starting with ace0bf4216c56b67458dd27514f850e9c4ca94d3fa487db289f01f806982fd04 not found: ID does not exist" containerID="ace0bf4216c56b67458dd27514f850e9c4ca94d3fa487db289f01f806982fd04" Sep 30 09:52:50 crc kubenswrapper[4730]: I0930 09:52:50.761655 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ace0bf4216c56b67458dd27514f850e9c4ca94d3fa487db289f01f806982fd04"} err="failed to get container status \"ace0bf4216c56b67458dd27514f850e9c4ca94d3fa487db289f01f806982fd04\": rpc error: code = NotFound desc = could not find container \"ace0bf4216c56b67458dd27514f850e9c4ca94d3fa487db289f01f806982fd04\": container with ID starting with ace0bf4216c56b67458dd27514f850e9c4ca94d3fa487db289f01f806982fd04 not found: ID does not exist" Sep 30 09:52:50 crc kubenswrapper[4730]: I0930 09:52:50.838581 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57751804-fe40-4d95-a5b2-529037d3ba17-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57751804-fe40-4d95-a5b2-529037d3ba17" (UID: "57751804-fe40-4d95-a5b2-529037d3ba17"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 09:52:50 crc kubenswrapper[4730]: I0930 09:52:50.852560 4730 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57751804-fe40-4d95-a5b2-529037d3ba17-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 09:52:50 crc kubenswrapper[4730]: I0930 09:52:50.852604 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lvwfp\" (UniqueName: \"kubernetes.io/projected/57751804-fe40-4d95-a5b2-529037d3ba17-kube-api-access-lvwfp\") on node \"crc\" DevicePath \"\"" Sep 30 09:52:50 crc kubenswrapper[4730]: I0930 09:52:50.852638 4730 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57751804-fe40-4d95-a5b2-529037d3ba17-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 09:52:50 crc kubenswrapper[4730]: I0930 09:52:50.998025 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-drr8f"] Sep 30 09:52:51 crc kubenswrapper[4730]: I0930 09:52:51.003053 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-drr8f"] Sep 30 09:52:52 crc kubenswrapper[4730]: I0930 09:52:52.387959 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57751804-fe40-4d95-a5b2-529037d3ba17" path="/var/lib/kubelet/pods/57751804-fe40-4d95-a5b2-529037d3ba17/volumes" Sep 30 09:52:52 crc kubenswrapper[4730]: I0930 09:52:52.389790 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6d72e26-75bd-4095-aae3-345a816de053" path="/var/lib/kubelet/pods/b6d72e26-75bd-4095-aae3-345a816de053/volumes" Sep 30 09:53:02 crc kubenswrapper[4730]: I0930 09:53:02.337097 4730 patch_prober.go:28] interesting pod/machine-config-daemon-d4zf9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 09:53:02 crc kubenswrapper[4730]: I0930 09:53:02.337681 4730 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 09:53:02 crc kubenswrapper[4730]: I0930 09:53:02.337740 4730 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" Sep 30 09:53:02 crc kubenswrapper[4730]: I0930 09:53:02.338373 4730 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"defac85c6bf34a3ea262b1ad293516b72b27c3f3e328f3f970694bb978bf90a6"} pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 09:53:02 crc kubenswrapper[4730]: I0930 09:53:02.338432 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerName="machine-config-daemon" containerID="cri-o://defac85c6bf34a3ea262b1ad293516b72b27c3f3e328f3f970694bb978bf90a6" gracePeriod=600 Sep 30 09:53:02 crc kubenswrapper[4730]: I0930 09:53:02.745651 4730 
generic.go:334] "Generic (PLEG): container finished" podID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerID="defac85c6bf34a3ea262b1ad293516b72b27c3f3e328f3f970694bb978bf90a6" exitCode=0 Sep 30 09:53:02 crc kubenswrapper[4730]: I0930 09:53:02.745726 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" event={"ID":"95bd4436-8399-478d-9552-c9ba5ae8f327","Type":"ContainerDied","Data":"defac85c6bf34a3ea262b1ad293516b72b27c3f3e328f3f970694bb978bf90a6"} Sep 30 09:53:02 crc kubenswrapper[4730]: I0930 09:53:02.745946 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" event={"ID":"95bd4436-8399-478d-9552-c9ba5ae8f327","Type":"ContainerStarted","Data":"329b3ada6780543f63d35d6db2f1dc9bd16e3f7ca3e03a686699b3e9535f2065"} Sep 30 09:53:11 crc kubenswrapper[4730]: I0930 09:53:11.562294 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-r8tph" podUID="b495236a-11fa-48fb-9361-3c02fe062e4b" containerName="oauth-openshift" containerID="cri-o://d7af9e9acf05f0dd17224908aecedffc3be82c81e7e22d6d4d307a1a7c3e7c7d" gracePeriod=15 Sep 30 09:53:11 crc kubenswrapper[4730]: I0930 09:53:11.796207 4730 generic.go:334] "Generic (PLEG): container finished" podID="b495236a-11fa-48fb-9361-3c02fe062e4b" containerID="d7af9e9acf05f0dd17224908aecedffc3be82c81e7e22d6d4d307a1a7c3e7c7d" exitCode=0 Sep 30 09:53:11 crc kubenswrapper[4730]: I0930 09:53:11.796305 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-r8tph" event={"ID":"b495236a-11fa-48fb-9361-3c02fe062e4b","Type":"ContainerDied","Data":"d7af9e9acf05f0dd17224908aecedffc3be82c81e7e22d6d4d307a1a7c3e7c7d"} Sep 30 09:53:11 crc kubenswrapper[4730]: I0930 09:53:11.938287 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-r8tph" Sep 30 09:53:11 crc kubenswrapper[4730]: I0930 09:53:11.974916 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-fdd74686d-k28ds"] Sep 30 09:53:11 crc kubenswrapper[4730]: E0930 09:53:11.975149 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f65dbc04-da69-4a9e-b564-bd39da6f5afe" containerName="pruner" Sep 30 09:53:11 crc kubenswrapper[4730]: I0930 09:53:11.975162 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="f65dbc04-da69-4a9e-b564-bd39da6f5afe" containerName="pruner" Sep 30 09:53:11 crc kubenswrapper[4730]: E0930 09:53:11.975171 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b495236a-11fa-48fb-9361-3c02fe062e4b" containerName="oauth-openshift" Sep 30 09:53:11 crc kubenswrapper[4730]: I0930 09:53:11.975177 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="b495236a-11fa-48fb-9361-3c02fe062e4b" containerName="oauth-openshift" Sep 30 09:53:11 crc kubenswrapper[4730]: E0930 09:53:11.975183 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b6d72e26-75bd-4095-aae3-345a816de053" containerName="extract-utilities" Sep 30 09:53:11 crc kubenswrapper[4730]: I0930 09:53:11.975189 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="b6d72e26-75bd-4095-aae3-345a816de053" containerName="extract-utilities" Sep 30 09:53:11 crc kubenswrapper[4730]: E0930 09:53:11.975199 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b6d72e26-75bd-4095-aae3-345a816de053" containerName="extract-content" Sep 30 09:53:11 crc kubenswrapper[4730]: I0930 09:53:11.975205 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="b6d72e26-75bd-4095-aae3-345a816de053" containerName="extract-content" Sep 30 09:53:11 crc kubenswrapper[4730]: E0930 09:53:11.975214 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0f9427a2-2872-43a6-9dfe-5754901b1043" containerName="pruner" Sep 30 09:53:11 crc kubenswrapper[4730]: I0930 09:53:11.975220 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="0f9427a2-2872-43a6-9dfe-5754901b1043" containerName="pruner" Sep 30 09:53:11 crc kubenswrapper[4730]: E0930 09:53:11.975228 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="30fe799a-bbf1-4e62-8187-353da7ca9930" containerName="extract-utilities" Sep 30 09:53:11 crc kubenswrapper[4730]: I0930 09:53:11.975235 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="30fe799a-bbf1-4e62-8187-353da7ca9930" containerName="extract-utilities" Sep 30 09:53:11 crc kubenswrapper[4730]: E0930 09:53:11.975245 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="57751804-fe40-4d95-a5b2-529037d3ba17" containerName="extract-utilities" Sep 30 09:53:11 crc kubenswrapper[4730]: I0930 09:53:11.975251 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="57751804-fe40-4d95-a5b2-529037d3ba17" containerName="extract-utilities" Sep 30 09:53:11 crc kubenswrapper[4730]: E0930 09:53:11.975260 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="57751804-fe40-4d95-a5b2-529037d3ba17" containerName="registry-server" Sep 30 09:53:11 crc kubenswrapper[4730]: I0930 09:53:11.975266 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="57751804-fe40-4d95-a5b2-529037d3ba17" containerName="registry-server" Sep 30 09:53:11 crc kubenswrapper[4730]: E0930 09:53:11.975280 4730 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="30fe799a-bbf1-4e62-8187-353da7ca9930" containerName="registry-server" Sep 30 09:53:11 crc kubenswrapper[4730]: I0930 09:53:11.975286 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="30fe799a-bbf1-4e62-8187-353da7ca9930" containerName="registry-server" Sep 30 09:53:11 crc kubenswrapper[4730]: E0930 09:53:11.975298 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6fe7d185-5cd3-4de1-a842-065620d27fdf" containerName="extract-content" Sep 30 09:53:11 crc kubenswrapper[4730]: I0930 09:53:11.975305 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="6fe7d185-5cd3-4de1-a842-065620d27fdf" containerName="extract-content" Sep 30 09:53:11 crc kubenswrapper[4730]: E0930 09:53:11.975314 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6fe7d185-5cd3-4de1-a842-065620d27fdf" containerName="registry-server" Sep 30 09:53:11 crc kubenswrapper[4730]: I0930 09:53:11.975321 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="6fe7d185-5cd3-4de1-a842-065620d27fdf" containerName="registry-server" Sep 30 09:53:11 crc kubenswrapper[4730]: E0930 09:53:11.975334 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="30fe799a-bbf1-4e62-8187-353da7ca9930" containerName="extract-content" Sep 30 09:53:11 crc kubenswrapper[4730]: I0930 09:53:11.975341 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="30fe799a-bbf1-4e62-8187-353da7ca9930" containerName="extract-content" Sep 30 09:53:11 crc kubenswrapper[4730]: E0930 09:53:11.975348 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6fe7d185-5cd3-4de1-a842-065620d27fdf" containerName="extract-utilities" Sep 30 09:53:11 crc kubenswrapper[4730]: I0930 09:53:11.975355 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="6fe7d185-5cd3-4de1-a842-065620d27fdf" containerName="extract-utilities" Sep 30 09:53:11 crc kubenswrapper[4730]: E0930 09:53:11.975364 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b6d72e26-75bd-4095-aae3-345a816de053" containerName="registry-server" Sep 30 09:53:11 crc kubenswrapper[4730]: I0930 09:53:11.975373 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="b6d72e26-75bd-4095-aae3-345a816de053" containerName="registry-server" Sep 30 09:53:11 crc kubenswrapper[4730]: E0930 09:53:11.975383 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="57751804-fe40-4d95-a5b2-529037d3ba17" containerName="extract-content" Sep 30 09:53:11 crc kubenswrapper[4730]: I0930 09:53:11.975391 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="57751804-fe40-4d95-a5b2-529037d3ba17" containerName="extract-content" Sep 30 09:53:11 crc kubenswrapper[4730]: I0930 09:53:11.975473 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="57751804-fe40-4d95-a5b2-529037d3ba17" containerName="registry-server" Sep 30 09:53:11 crc kubenswrapper[4730]: I0930 09:53:11.975487 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="b6d72e26-75bd-4095-aae3-345a816de053" containerName="registry-server" Sep 30 09:53:11 crc kubenswrapper[4730]: I0930 09:53:11.975497 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="6fe7d185-5cd3-4de1-a842-065620d27fdf" containerName="registry-server" Sep 30 09:53:11 crc kubenswrapper[4730]: I0930 09:53:11.975505 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="0f9427a2-2872-43a6-9dfe-5754901b1043" containerName="pruner" Sep 30 09:53:11 crc kubenswrapper[4730]: I0930 09:53:11.975513 4730 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="b495236a-11fa-48fb-9361-3c02fe062e4b" containerName="oauth-openshift" Sep 30 09:53:11 crc kubenswrapper[4730]: I0930 09:53:11.975521 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="f65dbc04-da69-4a9e-b564-bd39da6f5afe" containerName="pruner" Sep 30 09:53:11 crc kubenswrapper[4730]: I0930 09:53:11.975529 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="30fe799a-bbf1-4e62-8187-353da7ca9930" containerName="registry-server" Sep 30 09:53:11 crc kubenswrapper[4730]: I0930 09:53:11.975969 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-fdd74686d-k28ds" Sep 30 09:53:11 crc kubenswrapper[4730]: I0930 09:53:11.992067 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-fdd74686d-k28ds"] Sep 30 09:53:12 crc kubenswrapper[4730]: I0930 09:53:12.013498 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/b495236a-11fa-48fb-9361-3c02fe062e4b-audit-policies\") pod \"b495236a-11fa-48fb-9361-3c02fe062e4b\" (UID: \"b495236a-11fa-48fb-9361-3c02fe062e4b\") " Sep 30 09:53:12 crc kubenswrapper[4730]: I0930 09:53:12.013570 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/b495236a-11fa-48fb-9361-3c02fe062e4b-v4-0-config-system-serving-cert\") pod \"b495236a-11fa-48fb-9361-3c02fe062e4b\" (UID: \"b495236a-11fa-48fb-9361-3c02fe062e4b\") " Sep 30 09:53:12 crc kubenswrapper[4730]: I0930 09:53:12.013640 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/b495236a-11fa-48fb-9361-3c02fe062e4b-v4-0-config-user-template-error\") pod \"b495236a-11fa-48fb-9361-3c02fe062e4b\" (UID: \"b495236a-11fa-48fb-9361-3c02fe062e4b\") " Sep 30 09:53:12 crc kubenswrapper[4730]: I0930 09:53:12.013685 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/b495236a-11fa-48fb-9361-3c02fe062e4b-v4-0-config-user-idp-0-file-data\") pod \"b495236a-11fa-48fb-9361-3c02fe062e4b\" (UID: \"b495236a-11fa-48fb-9361-3c02fe062e4b\") " Sep 30 09:53:12 crc kubenswrapper[4730]: I0930 09:53:12.013704 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/b495236a-11fa-48fb-9361-3c02fe062e4b-v4-0-config-user-template-login\") pod \"b495236a-11fa-48fb-9361-3c02fe062e4b\" (UID: \"b495236a-11fa-48fb-9361-3c02fe062e4b\") " Sep 30 09:53:12 crc kubenswrapper[4730]: I0930 09:53:12.013742 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/b495236a-11fa-48fb-9361-3c02fe062e4b-v4-0-config-system-ocp-branding-template\") pod \"b495236a-11fa-48fb-9361-3c02fe062e4b\" (UID: \"b495236a-11fa-48fb-9361-3c02fe062e4b\") " Sep 30 09:53:12 crc kubenswrapper[4730]: I0930 09:53:12.013770 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/b495236a-11fa-48fb-9361-3c02fe062e4b-v4-0-config-system-session\") pod \"b495236a-11fa-48fb-9361-3c02fe062e4b\" (UID: 
\"b495236a-11fa-48fb-9361-3c02fe062e4b\") " Sep 30 09:53:12 crc kubenswrapper[4730]: I0930 09:53:12.013829 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b495236a-11fa-48fb-9361-3c02fe062e4b-v4-0-config-system-trusted-ca-bundle\") pod \"b495236a-11fa-48fb-9361-3c02fe062e4b\" (UID: \"b495236a-11fa-48fb-9361-3c02fe062e4b\") " Sep 30 09:53:12 crc kubenswrapper[4730]: I0930 09:53:12.013856 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/b495236a-11fa-48fb-9361-3c02fe062e4b-v4-0-config-user-template-provider-selection\") pod \"b495236a-11fa-48fb-9361-3c02fe062e4b\" (UID: \"b495236a-11fa-48fb-9361-3c02fe062e4b\") " Sep 30 09:53:12 crc kubenswrapper[4730]: I0930 09:53:12.013902 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/b495236a-11fa-48fb-9361-3c02fe062e4b-v4-0-config-system-cliconfig\") pod \"b495236a-11fa-48fb-9361-3c02fe062e4b\" (UID: \"b495236a-11fa-48fb-9361-3c02fe062e4b\") " Sep 30 09:53:12 crc kubenswrapper[4730]: I0930 09:53:12.013927 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/b495236a-11fa-48fb-9361-3c02fe062e4b-audit-dir\") pod \"b495236a-11fa-48fb-9361-3c02fe062e4b\" (UID: \"b495236a-11fa-48fb-9361-3c02fe062e4b\") " Sep 30 09:53:12 crc kubenswrapper[4730]: I0930 09:53:12.013963 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/b495236a-11fa-48fb-9361-3c02fe062e4b-v4-0-config-system-router-certs\") pod \"b495236a-11fa-48fb-9361-3c02fe062e4b\" (UID: \"b495236a-11fa-48fb-9361-3c02fe062e4b\") " Sep 30 09:53:12 crc kubenswrapper[4730]: I0930 09:53:12.013986 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/b495236a-11fa-48fb-9361-3c02fe062e4b-v4-0-config-system-service-ca\") pod \"b495236a-11fa-48fb-9361-3c02fe062e4b\" (UID: \"b495236a-11fa-48fb-9361-3c02fe062e4b\") " Sep 30 09:53:12 crc kubenswrapper[4730]: I0930 09:53:12.014012 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b2922\" (UniqueName: \"kubernetes.io/projected/b495236a-11fa-48fb-9361-3c02fe062e4b-kube-api-access-b2922\") pod \"b495236a-11fa-48fb-9361-3c02fe062e4b\" (UID: \"b495236a-11fa-48fb-9361-3c02fe062e4b\") " Sep 30 09:53:12 crc kubenswrapper[4730]: I0930 09:53:12.014693 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b495236a-11fa-48fb-9361-3c02fe062e4b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "b495236a-11fa-48fb-9361-3c02fe062e4b" (UID: "b495236a-11fa-48fb-9361-3c02fe062e4b"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 09:53:12 crc kubenswrapper[4730]: I0930 09:53:12.014966 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b495236a-11fa-48fb-9361-3c02fe062e4b-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "b495236a-11fa-48fb-9361-3c02fe062e4b" (UID: "b495236a-11fa-48fb-9361-3c02fe062e4b"). InnerVolumeSpecName "audit-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 09:53:12 crc kubenswrapper[4730]: I0930 09:53:12.015829 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b495236a-11fa-48fb-9361-3c02fe062e4b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "b495236a-11fa-48fb-9361-3c02fe062e4b" (UID: "b495236a-11fa-48fb-9361-3c02fe062e4b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 09:53:12 crc kubenswrapper[4730]: I0930 09:53:12.015927 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b495236a-11fa-48fb-9361-3c02fe062e4b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "b495236a-11fa-48fb-9361-3c02fe062e4b" (UID: "b495236a-11fa-48fb-9361-3c02fe062e4b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 09:53:12 crc kubenswrapper[4730]: I0930 09:53:12.016516 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b495236a-11fa-48fb-9361-3c02fe062e4b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "b495236a-11fa-48fb-9361-3c02fe062e4b" (UID: "b495236a-11fa-48fb-9361-3c02fe062e4b"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 09:53:12 crc kubenswrapper[4730]: I0930 09:53:12.020989 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b495236a-11fa-48fb-9361-3c02fe062e4b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "b495236a-11fa-48fb-9361-3c02fe062e4b" (UID: "b495236a-11fa-48fb-9361-3c02fe062e4b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 09:53:12 crc kubenswrapper[4730]: I0930 09:53:12.022250 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b495236a-11fa-48fb-9361-3c02fe062e4b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "b495236a-11fa-48fb-9361-3c02fe062e4b" (UID: "b495236a-11fa-48fb-9361-3c02fe062e4b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 09:53:12 crc kubenswrapper[4730]: I0930 09:53:12.022379 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b495236a-11fa-48fb-9361-3c02fe062e4b-kube-api-access-b2922" (OuterVolumeSpecName: "kube-api-access-b2922") pod "b495236a-11fa-48fb-9361-3c02fe062e4b" (UID: "b495236a-11fa-48fb-9361-3c02fe062e4b"). InnerVolumeSpecName "kube-api-access-b2922". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 09:53:12 crc kubenswrapper[4730]: I0930 09:53:12.022469 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b495236a-11fa-48fb-9361-3c02fe062e4b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "b495236a-11fa-48fb-9361-3c02fe062e4b" (UID: "b495236a-11fa-48fb-9361-3c02fe062e4b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 09:53:12 crc kubenswrapper[4730]: I0930 09:53:12.023156 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b495236a-11fa-48fb-9361-3c02fe062e4b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "b495236a-11fa-48fb-9361-3c02fe062e4b" (UID: "b495236a-11fa-48fb-9361-3c02fe062e4b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 09:53:12 crc kubenswrapper[4730]: I0930 09:53:12.023308 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b495236a-11fa-48fb-9361-3c02fe062e4b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "b495236a-11fa-48fb-9361-3c02fe062e4b" (UID: "b495236a-11fa-48fb-9361-3c02fe062e4b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 09:53:12 crc kubenswrapper[4730]: I0930 09:53:12.023489 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b495236a-11fa-48fb-9361-3c02fe062e4b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "b495236a-11fa-48fb-9361-3c02fe062e4b" (UID: "b495236a-11fa-48fb-9361-3c02fe062e4b"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 09:53:12 crc kubenswrapper[4730]: I0930 09:53:12.023785 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b495236a-11fa-48fb-9361-3c02fe062e4b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "b495236a-11fa-48fb-9361-3c02fe062e4b" (UID: "b495236a-11fa-48fb-9361-3c02fe062e4b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 09:53:12 crc kubenswrapper[4730]: I0930 09:53:12.027823 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b495236a-11fa-48fb-9361-3c02fe062e4b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "b495236a-11fa-48fb-9361-3c02fe062e4b" (UID: "b495236a-11fa-48fb-9361-3c02fe062e4b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 09:53:12 crc kubenswrapper[4730]: I0930 09:53:12.116354 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/4dd2a57d-d7d0-461e-a959-09ac733e0f37-audit-policies\") pod \"oauth-openshift-fdd74686d-k28ds\" (UID: \"4dd2a57d-d7d0-461e-a959-09ac733e0f37\") " pod="openshift-authentication/oauth-openshift-fdd74686d-k28ds" Sep 30 09:53:12 crc kubenswrapper[4730]: I0930 09:53:12.116439 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zbcml\" (UniqueName: \"kubernetes.io/projected/4dd2a57d-d7d0-461e-a959-09ac733e0f37-kube-api-access-zbcml\") pod \"oauth-openshift-fdd74686d-k28ds\" (UID: \"4dd2a57d-d7d0-461e-a959-09ac733e0f37\") " pod="openshift-authentication/oauth-openshift-fdd74686d-k28ds" Sep 30 09:53:12 crc kubenswrapper[4730]: I0930 09:53:12.116694 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/4dd2a57d-d7d0-461e-a959-09ac733e0f37-v4-0-config-system-cliconfig\") pod \"oauth-openshift-fdd74686d-k28ds\" (UID: \"4dd2a57d-d7d0-461e-a959-09ac733e0f37\") " pod="openshift-authentication/oauth-openshift-fdd74686d-k28ds" Sep 30 09:53:12 crc kubenswrapper[4730]: I0930 09:53:12.116722 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4dd2a57d-d7d0-461e-a959-09ac733e0f37-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-fdd74686d-k28ds\" (UID: \"4dd2a57d-d7d0-461e-a959-09ac733e0f37\") " pod="openshift-authentication/oauth-openshift-fdd74686d-k28ds" Sep 30 09:53:12 crc kubenswrapper[4730]: I0930 09:53:12.116779 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/4dd2a57d-d7d0-461e-a959-09ac733e0f37-audit-dir\") pod \"oauth-openshift-fdd74686d-k28ds\" (UID: \"4dd2a57d-d7d0-461e-a959-09ac733e0f37\") " pod="openshift-authentication/oauth-openshift-fdd74686d-k28ds" Sep 30 09:53:12 crc kubenswrapper[4730]: I0930 09:53:12.116829 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/4dd2a57d-d7d0-461e-a959-09ac733e0f37-v4-0-config-system-service-ca\") pod \"oauth-openshift-fdd74686d-k28ds\" (UID: \"4dd2a57d-d7d0-461e-a959-09ac733e0f37\") " pod="openshift-authentication/oauth-openshift-fdd74686d-k28ds" Sep 30 09:53:12 crc kubenswrapper[4730]: I0930 09:53:12.116858 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/4dd2a57d-d7d0-461e-a959-09ac733e0f37-v4-0-config-user-template-error\") pod \"oauth-openshift-fdd74686d-k28ds\" (UID: \"4dd2a57d-d7d0-461e-a959-09ac733e0f37\") " pod="openshift-authentication/oauth-openshift-fdd74686d-k28ds" Sep 30 09:53:12 crc kubenswrapper[4730]: I0930 09:53:12.117043 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/4dd2a57d-d7d0-461e-a959-09ac733e0f37-v4-0-config-user-template-login\") pod \"oauth-openshift-fdd74686d-k28ds\" (UID: 
\"4dd2a57d-d7d0-461e-a959-09ac733e0f37\") " pod="openshift-authentication/oauth-openshift-fdd74686d-k28ds" Sep 30 09:53:12 crc kubenswrapper[4730]: I0930 09:53:12.117201 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/4dd2a57d-d7d0-461e-a959-09ac733e0f37-v4-0-config-system-router-certs\") pod \"oauth-openshift-fdd74686d-k28ds\" (UID: \"4dd2a57d-d7d0-461e-a959-09ac733e0f37\") " pod="openshift-authentication/oauth-openshift-fdd74686d-k28ds" Sep 30 09:53:12 crc kubenswrapper[4730]: I0930 09:53:12.117235 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/4dd2a57d-d7d0-461e-a959-09ac733e0f37-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-fdd74686d-k28ds\" (UID: \"4dd2a57d-d7d0-461e-a959-09ac733e0f37\") " pod="openshift-authentication/oauth-openshift-fdd74686d-k28ds" Sep 30 09:53:12 crc kubenswrapper[4730]: I0930 09:53:12.117310 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/4dd2a57d-d7d0-461e-a959-09ac733e0f37-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-fdd74686d-k28ds\" (UID: \"4dd2a57d-d7d0-461e-a959-09ac733e0f37\") " pod="openshift-authentication/oauth-openshift-fdd74686d-k28ds" Sep 30 09:53:12 crc kubenswrapper[4730]: I0930 09:53:12.117374 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/4dd2a57d-d7d0-461e-a959-09ac733e0f37-v4-0-config-system-session\") pod \"oauth-openshift-fdd74686d-k28ds\" (UID: \"4dd2a57d-d7d0-461e-a959-09ac733e0f37\") " pod="openshift-authentication/oauth-openshift-fdd74686d-k28ds" Sep 30 09:53:12 crc kubenswrapper[4730]: I0930 09:53:12.117405 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/4dd2a57d-d7d0-461e-a959-09ac733e0f37-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-fdd74686d-k28ds\" (UID: \"4dd2a57d-d7d0-461e-a959-09ac733e0f37\") " pod="openshift-authentication/oauth-openshift-fdd74686d-k28ds" Sep 30 09:53:12 crc kubenswrapper[4730]: I0930 09:53:12.117435 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/4dd2a57d-d7d0-461e-a959-09ac733e0f37-v4-0-config-system-serving-cert\") pod \"oauth-openshift-fdd74686d-k28ds\" (UID: \"4dd2a57d-d7d0-461e-a959-09ac733e0f37\") " pod="openshift-authentication/oauth-openshift-fdd74686d-k28ds" Sep 30 09:53:12 crc kubenswrapper[4730]: I0930 09:53:12.117516 4730 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/b495236a-11fa-48fb-9361-3c02fe062e4b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Sep 30 09:53:12 crc kubenswrapper[4730]: I0930 09:53:12.117531 4730 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/b495236a-11fa-48fb-9361-3c02fe062e4b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Sep 30 09:53:12 crc kubenswrapper[4730]: 
I0930 09:53:12.117544 4730 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/b495236a-11fa-48fb-9361-3c02fe062e4b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Sep 30 09:53:12 crc kubenswrapper[4730]: I0930 09:53:12.117568 4730 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b495236a-11fa-48fb-9361-3c02fe062e4b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 09:53:12 crc kubenswrapper[4730]: I0930 09:53:12.117587 4730 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/b495236a-11fa-48fb-9361-3c02fe062e4b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Sep 30 09:53:12 crc kubenswrapper[4730]: I0930 09:53:12.117599 4730 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/b495236a-11fa-48fb-9361-3c02fe062e4b-audit-dir\") on node \"crc\" DevicePath \"\"" Sep 30 09:53:12 crc kubenswrapper[4730]: I0930 09:53:12.117649 4730 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/b495236a-11fa-48fb-9361-3c02fe062e4b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Sep 30 09:53:12 crc kubenswrapper[4730]: I0930 09:53:12.117660 4730 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/b495236a-11fa-48fb-9361-3c02fe062e4b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Sep 30 09:53:12 crc kubenswrapper[4730]: I0930 09:53:12.117671 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b2922\" (UniqueName: \"kubernetes.io/projected/b495236a-11fa-48fb-9361-3c02fe062e4b-kube-api-access-b2922\") on node \"crc\" DevicePath \"\"" Sep 30 09:53:12 crc kubenswrapper[4730]: I0930 09:53:12.117684 4730 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/b495236a-11fa-48fb-9361-3c02fe062e4b-audit-policies\") on node \"crc\" DevicePath \"\"" Sep 30 09:53:12 crc kubenswrapper[4730]: I0930 09:53:12.117697 4730 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/b495236a-11fa-48fb-9361-3c02fe062e4b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 09:53:12 crc kubenswrapper[4730]: I0930 09:53:12.117728 4730 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/b495236a-11fa-48fb-9361-3c02fe062e4b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Sep 30 09:53:12 crc kubenswrapper[4730]: I0930 09:53:12.117740 4730 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/b495236a-11fa-48fb-9361-3c02fe062e4b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Sep 30 09:53:12 crc kubenswrapper[4730]: I0930 09:53:12.117750 4730 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/b495236a-11fa-48fb-9361-3c02fe062e4b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Sep 30 09:53:12 crc kubenswrapper[4730]: I0930 
09:53:12.219329 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/4dd2a57d-d7d0-461e-a959-09ac733e0f37-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-fdd74686d-k28ds\" (UID: \"4dd2a57d-d7d0-461e-a959-09ac733e0f37\") " pod="openshift-authentication/oauth-openshift-fdd74686d-k28ds" Sep 30 09:53:12 crc kubenswrapper[4730]: I0930 09:53:12.219408 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/4dd2a57d-d7d0-461e-a959-09ac733e0f37-v4-0-config-system-session\") pod \"oauth-openshift-fdd74686d-k28ds\" (UID: \"4dd2a57d-d7d0-461e-a959-09ac733e0f37\") " pod="openshift-authentication/oauth-openshift-fdd74686d-k28ds" Sep 30 09:53:12 crc kubenswrapper[4730]: I0930 09:53:12.219438 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/4dd2a57d-d7d0-461e-a959-09ac733e0f37-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-fdd74686d-k28ds\" (UID: \"4dd2a57d-d7d0-461e-a959-09ac733e0f37\") " pod="openshift-authentication/oauth-openshift-fdd74686d-k28ds" Sep 30 09:53:12 crc kubenswrapper[4730]: I0930 09:53:12.219470 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/4dd2a57d-d7d0-461e-a959-09ac733e0f37-v4-0-config-system-serving-cert\") pod \"oauth-openshift-fdd74686d-k28ds\" (UID: \"4dd2a57d-d7d0-461e-a959-09ac733e0f37\") " pod="openshift-authentication/oauth-openshift-fdd74686d-k28ds" Sep 30 09:53:12 crc kubenswrapper[4730]: I0930 09:53:12.219499 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/4dd2a57d-d7d0-461e-a959-09ac733e0f37-audit-policies\") pod \"oauth-openshift-fdd74686d-k28ds\" (UID: \"4dd2a57d-d7d0-461e-a959-09ac733e0f37\") " pod="openshift-authentication/oauth-openshift-fdd74686d-k28ds" Sep 30 09:53:12 crc kubenswrapper[4730]: I0930 09:53:12.219526 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zbcml\" (UniqueName: \"kubernetes.io/projected/4dd2a57d-d7d0-461e-a959-09ac733e0f37-kube-api-access-zbcml\") pod \"oauth-openshift-fdd74686d-k28ds\" (UID: \"4dd2a57d-d7d0-461e-a959-09ac733e0f37\") " pod="openshift-authentication/oauth-openshift-fdd74686d-k28ds" Sep 30 09:53:12 crc kubenswrapper[4730]: I0930 09:53:12.219569 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/4dd2a57d-d7d0-461e-a959-09ac733e0f37-v4-0-config-system-cliconfig\") pod \"oauth-openshift-fdd74686d-k28ds\" (UID: \"4dd2a57d-d7d0-461e-a959-09ac733e0f37\") " pod="openshift-authentication/oauth-openshift-fdd74686d-k28ds" Sep 30 09:53:12 crc kubenswrapper[4730]: I0930 09:53:12.219590 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4dd2a57d-d7d0-461e-a959-09ac733e0f37-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-fdd74686d-k28ds\" (UID: \"4dd2a57d-d7d0-461e-a959-09ac733e0f37\") " pod="openshift-authentication/oauth-openshift-fdd74686d-k28ds" Sep 30 09:53:12 crc kubenswrapper[4730]: I0930 09:53:12.219634 4730 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/4dd2a57d-d7d0-461e-a959-09ac733e0f37-audit-dir\") pod \"oauth-openshift-fdd74686d-k28ds\" (UID: \"4dd2a57d-d7d0-461e-a959-09ac733e0f37\") " pod="openshift-authentication/oauth-openshift-fdd74686d-k28ds" Sep 30 09:53:12 crc kubenswrapper[4730]: I0930 09:53:12.219675 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/4dd2a57d-d7d0-461e-a959-09ac733e0f37-v4-0-config-system-service-ca\") pod \"oauth-openshift-fdd74686d-k28ds\" (UID: \"4dd2a57d-d7d0-461e-a959-09ac733e0f37\") " pod="openshift-authentication/oauth-openshift-fdd74686d-k28ds" Sep 30 09:53:12 crc kubenswrapper[4730]: I0930 09:53:12.219700 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/4dd2a57d-d7d0-461e-a959-09ac733e0f37-v4-0-config-user-template-error\") pod \"oauth-openshift-fdd74686d-k28ds\" (UID: \"4dd2a57d-d7d0-461e-a959-09ac733e0f37\") " pod="openshift-authentication/oauth-openshift-fdd74686d-k28ds" Sep 30 09:53:12 crc kubenswrapper[4730]: I0930 09:53:12.219738 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/4dd2a57d-d7d0-461e-a959-09ac733e0f37-v4-0-config-user-template-login\") pod \"oauth-openshift-fdd74686d-k28ds\" (UID: \"4dd2a57d-d7d0-461e-a959-09ac733e0f37\") " pod="openshift-authentication/oauth-openshift-fdd74686d-k28ds" Sep 30 09:53:12 crc kubenswrapper[4730]: I0930 09:53:12.219767 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/4dd2a57d-d7d0-461e-a959-09ac733e0f37-v4-0-config-system-router-certs\") pod \"oauth-openshift-fdd74686d-k28ds\" (UID: \"4dd2a57d-d7d0-461e-a959-09ac733e0f37\") " pod="openshift-authentication/oauth-openshift-fdd74686d-k28ds" Sep 30 09:53:12 crc kubenswrapper[4730]: I0930 09:53:12.219789 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/4dd2a57d-d7d0-461e-a959-09ac733e0f37-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-fdd74686d-k28ds\" (UID: \"4dd2a57d-d7d0-461e-a959-09ac733e0f37\") " pod="openshift-authentication/oauth-openshift-fdd74686d-k28ds" Sep 30 09:53:12 crc kubenswrapper[4730]: I0930 09:53:12.220387 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/4dd2a57d-d7d0-461e-a959-09ac733e0f37-audit-dir\") pod \"oauth-openshift-fdd74686d-k28ds\" (UID: \"4dd2a57d-d7d0-461e-a959-09ac733e0f37\") " pod="openshift-authentication/oauth-openshift-fdd74686d-k28ds" Sep 30 09:53:12 crc kubenswrapper[4730]: I0930 09:53:12.220888 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/4dd2a57d-d7d0-461e-a959-09ac733e0f37-audit-policies\") pod \"oauth-openshift-fdd74686d-k28ds\" (UID: \"4dd2a57d-d7d0-461e-a959-09ac733e0f37\") " pod="openshift-authentication/oauth-openshift-fdd74686d-k28ds" Sep 30 09:53:12 crc kubenswrapper[4730]: I0930 09:53:12.220866 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: 
\"kubernetes.io/configmap/4dd2a57d-d7d0-461e-a959-09ac733e0f37-v4-0-config-system-service-ca\") pod \"oauth-openshift-fdd74686d-k28ds\" (UID: \"4dd2a57d-d7d0-461e-a959-09ac733e0f37\") " pod="openshift-authentication/oauth-openshift-fdd74686d-k28ds" Sep 30 09:53:12 crc kubenswrapper[4730]: I0930 09:53:12.221489 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/4dd2a57d-d7d0-461e-a959-09ac733e0f37-v4-0-config-system-cliconfig\") pod \"oauth-openshift-fdd74686d-k28ds\" (UID: \"4dd2a57d-d7d0-461e-a959-09ac733e0f37\") " pod="openshift-authentication/oauth-openshift-fdd74686d-k28ds" Sep 30 09:53:12 crc kubenswrapper[4730]: I0930 09:53:12.222496 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4dd2a57d-d7d0-461e-a959-09ac733e0f37-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-fdd74686d-k28ds\" (UID: \"4dd2a57d-d7d0-461e-a959-09ac733e0f37\") " pod="openshift-authentication/oauth-openshift-fdd74686d-k28ds" Sep 30 09:53:12 crc kubenswrapper[4730]: I0930 09:53:12.224964 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/4dd2a57d-d7d0-461e-a959-09ac733e0f37-v4-0-config-user-template-login\") pod \"oauth-openshift-fdd74686d-k28ds\" (UID: \"4dd2a57d-d7d0-461e-a959-09ac733e0f37\") " pod="openshift-authentication/oauth-openshift-fdd74686d-k28ds" Sep 30 09:53:12 crc kubenswrapper[4730]: I0930 09:53:12.224994 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/4dd2a57d-d7d0-461e-a959-09ac733e0f37-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-fdd74686d-k28ds\" (UID: \"4dd2a57d-d7d0-461e-a959-09ac733e0f37\") " pod="openshift-authentication/oauth-openshift-fdd74686d-k28ds" Sep 30 09:53:12 crc kubenswrapper[4730]: I0930 09:53:12.225342 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/4dd2a57d-d7d0-461e-a959-09ac733e0f37-v4-0-config-system-router-certs\") pod \"oauth-openshift-fdd74686d-k28ds\" (UID: \"4dd2a57d-d7d0-461e-a959-09ac733e0f37\") " pod="openshift-authentication/oauth-openshift-fdd74686d-k28ds" Sep 30 09:53:12 crc kubenswrapper[4730]: I0930 09:53:12.225427 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/4dd2a57d-d7d0-461e-a959-09ac733e0f37-v4-0-config-user-template-error\") pod \"oauth-openshift-fdd74686d-k28ds\" (UID: \"4dd2a57d-d7d0-461e-a959-09ac733e0f37\") " pod="openshift-authentication/oauth-openshift-fdd74686d-k28ds" Sep 30 09:53:12 crc kubenswrapper[4730]: I0930 09:53:12.225901 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/4dd2a57d-d7d0-461e-a959-09ac733e0f37-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-fdd74686d-k28ds\" (UID: \"4dd2a57d-d7d0-461e-a959-09ac733e0f37\") " pod="openshift-authentication/oauth-openshift-fdd74686d-k28ds" Sep 30 09:53:12 crc kubenswrapper[4730]: I0930 09:53:12.227626 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: 
\"kubernetes.io/secret/4dd2a57d-d7d0-461e-a959-09ac733e0f37-v4-0-config-system-session\") pod \"oauth-openshift-fdd74686d-k28ds\" (UID: \"4dd2a57d-d7d0-461e-a959-09ac733e0f37\") " pod="openshift-authentication/oauth-openshift-fdd74686d-k28ds" Sep 30 09:53:12 crc kubenswrapper[4730]: I0930 09:53:12.227729 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/4dd2a57d-d7d0-461e-a959-09ac733e0f37-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-fdd74686d-k28ds\" (UID: \"4dd2a57d-d7d0-461e-a959-09ac733e0f37\") " pod="openshift-authentication/oauth-openshift-fdd74686d-k28ds" Sep 30 09:53:12 crc kubenswrapper[4730]: I0930 09:53:12.230189 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/4dd2a57d-d7d0-461e-a959-09ac733e0f37-v4-0-config-system-serving-cert\") pod \"oauth-openshift-fdd74686d-k28ds\" (UID: \"4dd2a57d-d7d0-461e-a959-09ac733e0f37\") " pod="openshift-authentication/oauth-openshift-fdd74686d-k28ds" Sep 30 09:53:12 crc kubenswrapper[4730]: I0930 09:53:12.234896 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zbcml\" (UniqueName: \"kubernetes.io/projected/4dd2a57d-d7d0-461e-a959-09ac733e0f37-kube-api-access-zbcml\") pod \"oauth-openshift-fdd74686d-k28ds\" (UID: \"4dd2a57d-d7d0-461e-a959-09ac733e0f37\") " pod="openshift-authentication/oauth-openshift-fdd74686d-k28ds" Sep 30 09:53:12 crc kubenswrapper[4730]: I0930 09:53:12.311024 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-fdd74686d-k28ds" Sep 30 09:53:12 crc kubenswrapper[4730]: I0930 09:53:12.735736 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-fdd74686d-k28ds"] Sep 30 09:53:12 crc kubenswrapper[4730]: I0930 09:53:12.804525 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-fdd74686d-k28ds" event={"ID":"4dd2a57d-d7d0-461e-a959-09ac733e0f37","Type":"ContainerStarted","Data":"9400459926fbc2d51de3b025111049d8d7f07d5266ac00b49fadb84da6ef1e52"} Sep 30 09:53:12 crc kubenswrapper[4730]: I0930 09:53:12.806534 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-r8tph" event={"ID":"b495236a-11fa-48fb-9361-3c02fe062e4b","Type":"ContainerDied","Data":"2864527e5f6d07b051e16a793960fc187558d7e9c691c4dfe2a9307641ab9a0d"} Sep 30 09:53:12 crc kubenswrapper[4730]: I0930 09:53:12.806574 4730 scope.go:117] "RemoveContainer" containerID="d7af9e9acf05f0dd17224908aecedffc3be82c81e7e22d6d4d307a1a7c3e7c7d" Sep 30 09:53:12 crc kubenswrapper[4730]: I0930 09:53:12.806680 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-r8tph" Sep 30 09:53:12 crc kubenswrapper[4730]: I0930 09:53:12.826195 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-r8tph"] Sep 30 09:53:12 crc kubenswrapper[4730]: I0930 09:53:12.828574 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-r8tph"] Sep 30 09:53:13 crc kubenswrapper[4730]: I0930 09:53:13.813402 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-fdd74686d-k28ds" event={"ID":"4dd2a57d-d7d0-461e-a959-09ac733e0f37","Type":"ContainerStarted","Data":"114418891bac5985f53d4b8f20e86e0dcf70afc2365333048457d24ca276064f"} Sep 30 09:53:13 crc kubenswrapper[4730]: I0930 09:53:13.813803 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-fdd74686d-k28ds" Sep 30 09:53:13 crc kubenswrapper[4730]: I0930 09:53:13.819778 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-fdd74686d-k28ds" Sep 30 09:53:13 crc kubenswrapper[4730]: I0930 09:53:13.832487 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-fdd74686d-k28ds" podStartSLOduration=27.832468666 podStartE2EDuration="27.832468666s" podCreationTimestamp="2025-09-30 09:52:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 09:53:13.83140642 +0000 UTC m=+238.164666413" watchObservedRunningTime="2025-09-30 09:53:13.832468666 +0000 UTC m=+238.165728659" Sep 30 09:53:14 crc kubenswrapper[4730]: I0930 09:53:14.388099 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b495236a-11fa-48fb-9361-3c02fe062e4b" path="/var/lib/kubelet/pods/b495236a-11fa-48fb-9361-3c02fe062e4b/volumes" Sep 30 09:53:39 crc kubenswrapper[4730]: I0930 09:53:39.921469 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-8qhvr"] Sep 30 09:53:39 crc kubenswrapper[4730]: I0930 09:53:39.922362 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-8qhvr" podUID="cd78d8d1-8617-4c3d-9205-abd8bbdde710" containerName="registry-server" containerID="cri-o://aa87a7828f88f9712358db9846d27c69240d9060ecb201c3a63761fa2827f7ef" gracePeriod=30 Sep 30 09:53:39 crc kubenswrapper[4730]: I0930 09:53:39.930976 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-fnbq4"] Sep 30 09:53:39 crc kubenswrapper[4730]: I0930 09:53:39.931226 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-fnbq4" podUID="9ea0a817-d369-46fc-9e35-ab227abcbf25" containerName="registry-server" containerID="cri-o://7a4872c1de0676f328202646734b87e601bf87a09db7059c0ce843233070c99d" gracePeriod=30 Sep 30 09:53:39 crc kubenswrapper[4730]: I0930 09:53:39.940691 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-nt7nq"] Sep 30 09:53:39 crc kubenswrapper[4730]: I0930 09:53:39.940985 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-nt7nq" podUID="b437c48c-f825-4a51-9076-acf5dcd25e36" 
containerName="marketplace-operator" containerID="cri-o://67a0db591a5ac5da6e2caa2d9b76dd4067616304a38ee48c82db2ccbb8028ec5" gracePeriod=30 Sep 30 09:53:39 crc kubenswrapper[4730]: I0930 09:53:39.944437 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-8srlp"] Sep 30 09:53:39 crc kubenswrapper[4730]: I0930 09:53:39.944705 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-8srlp" podUID="297e4d9f-18c2-4e11-a89b-11df93cba4ef" containerName="registry-server" containerID="cri-o://076200e1ceb190f3029eab12693d4076afbdea446ff459c308e7de87ad3331f3" gracePeriod=30 Sep 30 09:53:39 crc kubenswrapper[4730]: I0930 09:53:39.958757 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-6gt5z"] Sep 30 09:53:39 crc kubenswrapper[4730]: I0930 09:53:39.959752 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-6gt5z" Sep 30 09:53:39 crc kubenswrapper[4730]: I0930 09:53:39.968795 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-5g8hq"] Sep 30 09:53:39 crc kubenswrapper[4730]: I0930 09:53:39.969120 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-5g8hq" podUID="57333b74-aac2-4b1b-af18-e4dece554edb" containerName="registry-server" containerID="cri-o://ce3642118be30648d075448cc24aed002b9f6c61fba1bc57687097b763091ecc" gracePeriod=30 Sep 30 09:53:39 crc kubenswrapper[4730]: I0930 09:53:39.981927 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-6gt5z"] Sep 30 09:53:40 crc kubenswrapper[4730]: I0930 09:53:40.087485 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a28d8376-b2f6-44da-b872-34bd96b74108-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-6gt5z\" (UID: \"a28d8376-b2f6-44da-b872-34bd96b74108\") " pod="openshift-marketplace/marketplace-operator-79b997595-6gt5z" Sep 30 09:53:40 crc kubenswrapper[4730]: I0930 09:53:40.087587 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9c5sm\" (UniqueName: \"kubernetes.io/projected/a28d8376-b2f6-44da-b872-34bd96b74108-kube-api-access-9c5sm\") pod \"marketplace-operator-79b997595-6gt5z\" (UID: \"a28d8376-b2f6-44da-b872-34bd96b74108\") " pod="openshift-marketplace/marketplace-operator-79b997595-6gt5z" Sep 30 09:53:40 crc kubenswrapper[4730]: I0930 09:53:40.087667 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/a28d8376-b2f6-44da-b872-34bd96b74108-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-6gt5z\" (UID: \"a28d8376-b2f6-44da-b872-34bd96b74108\") " pod="openshift-marketplace/marketplace-operator-79b997595-6gt5z" Sep 30 09:53:40 crc kubenswrapper[4730]: I0930 09:53:40.189129 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a28d8376-b2f6-44da-b872-34bd96b74108-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-6gt5z\" (UID: \"a28d8376-b2f6-44da-b872-34bd96b74108\") " 
pod="openshift-marketplace/marketplace-operator-79b997595-6gt5z" Sep 30 09:53:40 crc kubenswrapper[4730]: I0930 09:53:40.189283 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9c5sm\" (UniqueName: \"kubernetes.io/projected/a28d8376-b2f6-44da-b872-34bd96b74108-kube-api-access-9c5sm\") pod \"marketplace-operator-79b997595-6gt5z\" (UID: \"a28d8376-b2f6-44da-b872-34bd96b74108\") " pod="openshift-marketplace/marketplace-operator-79b997595-6gt5z" Sep 30 09:53:40 crc kubenswrapper[4730]: I0930 09:53:40.189335 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/a28d8376-b2f6-44da-b872-34bd96b74108-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-6gt5z\" (UID: \"a28d8376-b2f6-44da-b872-34bd96b74108\") " pod="openshift-marketplace/marketplace-operator-79b997595-6gt5z" Sep 30 09:53:40 crc kubenswrapper[4730]: I0930 09:53:40.191877 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a28d8376-b2f6-44da-b872-34bd96b74108-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-6gt5z\" (UID: \"a28d8376-b2f6-44da-b872-34bd96b74108\") " pod="openshift-marketplace/marketplace-operator-79b997595-6gt5z" Sep 30 09:53:40 crc kubenswrapper[4730]: I0930 09:53:40.202491 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/a28d8376-b2f6-44da-b872-34bd96b74108-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-6gt5z\" (UID: \"a28d8376-b2f6-44da-b872-34bd96b74108\") " pod="openshift-marketplace/marketplace-operator-79b997595-6gt5z" Sep 30 09:53:40 crc kubenswrapper[4730]: I0930 09:53:40.214114 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9c5sm\" (UniqueName: \"kubernetes.io/projected/a28d8376-b2f6-44da-b872-34bd96b74108-kube-api-access-9c5sm\") pod \"marketplace-operator-79b997595-6gt5z\" (UID: \"a28d8376-b2f6-44da-b872-34bd96b74108\") " pod="openshift-marketplace/marketplace-operator-79b997595-6gt5z" Sep 30 09:53:40 crc kubenswrapper[4730]: I0930 09:53:40.285184 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-6gt5z" Sep 30 09:53:40 crc kubenswrapper[4730]: I0930 09:53:40.448444 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-fnbq4" Sep 30 09:53:40 crc kubenswrapper[4730]: I0930 09:53:40.495772 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8srlp" Sep 30 09:53:40 crc kubenswrapper[4730]: I0930 09:53:40.499780 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-nt7nq" Sep 30 09:53:40 crc kubenswrapper[4730]: I0930 09:53:40.506912 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-5g8hq" Sep 30 09:53:40 crc kubenswrapper[4730]: I0930 09:53:40.508130 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-8qhvr" Sep 30 09:53:40 crc kubenswrapper[4730]: I0930 09:53:40.594405 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2bllk\" (UniqueName: \"kubernetes.io/projected/57333b74-aac2-4b1b-af18-e4dece554edb-kube-api-access-2bllk\") pod \"57333b74-aac2-4b1b-af18-e4dece554edb\" (UID: \"57333b74-aac2-4b1b-af18-e4dece554edb\") " Sep 30 09:53:40 crc kubenswrapper[4730]: I0930 09:53:40.594496 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b437c48c-f825-4a51-9076-acf5dcd25e36-marketplace-operator-metrics\") pod \"b437c48c-f825-4a51-9076-acf5dcd25e36\" (UID: \"b437c48c-f825-4a51-9076-acf5dcd25e36\") " Sep 30 09:53:40 crc kubenswrapper[4730]: I0930 09:53:40.594586 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9ea0a817-d369-46fc-9e35-ab227abcbf25-catalog-content\") pod \"9ea0a817-d369-46fc-9e35-ab227abcbf25\" (UID: \"9ea0a817-d369-46fc-9e35-ab227abcbf25\") " Sep 30 09:53:40 crc kubenswrapper[4730]: I0930 09:53:40.594636 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/297e4d9f-18c2-4e11-a89b-11df93cba4ef-utilities\") pod \"297e4d9f-18c2-4e11-a89b-11df93cba4ef\" (UID: \"297e4d9f-18c2-4e11-a89b-11df93cba4ef\") " Sep 30 09:53:40 crc kubenswrapper[4730]: I0930 09:53:40.594675 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-84k94\" (UniqueName: \"kubernetes.io/projected/cd78d8d1-8617-4c3d-9205-abd8bbdde710-kube-api-access-84k94\") pod \"cd78d8d1-8617-4c3d-9205-abd8bbdde710\" (UID: \"cd78d8d1-8617-4c3d-9205-abd8bbdde710\") " Sep 30 09:53:40 crc kubenswrapper[4730]: I0930 09:53:40.594715 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/297e4d9f-18c2-4e11-a89b-11df93cba4ef-catalog-content\") pod \"297e4d9f-18c2-4e11-a89b-11df93cba4ef\" (UID: \"297e4d9f-18c2-4e11-a89b-11df93cba4ef\") " Sep 30 09:53:40 crc kubenswrapper[4730]: I0930 09:53:40.594753 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q4xfm\" (UniqueName: \"kubernetes.io/projected/9ea0a817-d369-46fc-9e35-ab227abcbf25-kube-api-access-q4xfm\") pod \"9ea0a817-d369-46fc-9e35-ab227abcbf25\" (UID: \"9ea0a817-d369-46fc-9e35-ab227abcbf25\") " Sep 30 09:53:40 crc kubenswrapper[4730]: I0930 09:53:40.594794 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vsmsb\" (UniqueName: \"kubernetes.io/projected/297e4d9f-18c2-4e11-a89b-11df93cba4ef-kube-api-access-vsmsb\") pod \"297e4d9f-18c2-4e11-a89b-11df93cba4ef\" (UID: \"297e4d9f-18c2-4e11-a89b-11df93cba4ef\") " Sep 30 09:53:40 crc kubenswrapper[4730]: I0930 09:53:40.594818 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9ea0a817-d369-46fc-9e35-ab227abcbf25-utilities\") pod \"9ea0a817-d369-46fc-9e35-ab227abcbf25\" (UID: \"9ea0a817-d369-46fc-9e35-ab227abcbf25\") " Sep 30 09:53:40 crc kubenswrapper[4730]: I0930 09:53:40.594848 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/57333b74-aac2-4b1b-af18-e4dece554edb-utilities\") pod \"57333b74-aac2-4b1b-af18-e4dece554edb\" (UID: \"57333b74-aac2-4b1b-af18-e4dece554edb\") " Sep 30 09:53:40 crc kubenswrapper[4730]: I0930 09:53:40.594888 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57333b74-aac2-4b1b-af18-e4dece554edb-catalog-content\") pod \"57333b74-aac2-4b1b-af18-e4dece554edb\" (UID: \"57333b74-aac2-4b1b-af18-e4dece554edb\") " Sep 30 09:53:40 crc kubenswrapper[4730]: I0930 09:53:40.595347 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b437c48c-f825-4a51-9076-acf5dcd25e36-marketplace-trusted-ca\") pod \"b437c48c-f825-4a51-9076-acf5dcd25e36\" (UID: \"b437c48c-f825-4a51-9076-acf5dcd25e36\") " Sep 30 09:53:40 crc kubenswrapper[4730]: I0930 09:53:40.595396 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cx7zs\" (UniqueName: \"kubernetes.io/projected/b437c48c-f825-4a51-9076-acf5dcd25e36-kube-api-access-cx7zs\") pod \"b437c48c-f825-4a51-9076-acf5dcd25e36\" (UID: \"b437c48c-f825-4a51-9076-acf5dcd25e36\") " Sep 30 09:53:40 crc kubenswrapper[4730]: I0930 09:53:40.595426 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cd78d8d1-8617-4c3d-9205-abd8bbdde710-utilities\") pod \"cd78d8d1-8617-4c3d-9205-abd8bbdde710\" (UID: \"cd78d8d1-8617-4c3d-9205-abd8bbdde710\") " Sep 30 09:53:40 crc kubenswrapper[4730]: I0930 09:53:40.595455 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cd78d8d1-8617-4c3d-9205-abd8bbdde710-catalog-content\") pod \"cd78d8d1-8617-4c3d-9205-abd8bbdde710\" (UID: \"cd78d8d1-8617-4c3d-9205-abd8bbdde710\") " Sep 30 09:53:40 crc kubenswrapper[4730]: I0930 09:53:40.605475 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9ea0a817-d369-46fc-9e35-ab227abcbf25-utilities" (OuterVolumeSpecName: "utilities") pod "9ea0a817-d369-46fc-9e35-ab227abcbf25" (UID: "9ea0a817-d369-46fc-9e35-ab227abcbf25"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 09:53:40 crc kubenswrapper[4730]: I0930 09:53:40.607632 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57333b74-aac2-4b1b-af18-e4dece554edb-utilities" (OuterVolumeSpecName: "utilities") pod "57333b74-aac2-4b1b-af18-e4dece554edb" (UID: "57333b74-aac2-4b1b-af18-e4dece554edb"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 09:53:40 crc kubenswrapper[4730]: I0930 09:53:40.610382 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b437c48c-f825-4a51-9076-acf5dcd25e36-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b437c48c-f825-4a51-9076-acf5dcd25e36" (UID: "b437c48c-f825-4a51-9076-acf5dcd25e36"). InnerVolumeSpecName "marketplace-trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 09:53:40 crc kubenswrapper[4730]: I0930 09:53:40.610490 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cd78d8d1-8617-4c3d-9205-abd8bbdde710-utilities" (OuterVolumeSpecName: "utilities") pod "cd78d8d1-8617-4c3d-9205-abd8bbdde710" (UID: "cd78d8d1-8617-4c3d-9205-abd8bbdde710"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 09:53:40 crc kubenswrapper[4730]: I0930 09:53:40.612740 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/297e4d9f-18c2-4e11-a89b-11df93cba4ef-utilities" (OuterVolumeSpecName: "utilities") pod "297e4d9f-18c2-4e11-a89b-11df93cba4ef" (UID: "297e4d9f-18c2-4e11-a89b-11df93cba4ef"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 09:53:40 crc kubenswrapper[4730]: I0930 09:53:40.614055 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b437c48c-f825-4a51-9076-acf5dcd25e36-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b437c48c-f825-4a51-9076-acf5dcd25e36" (UID: "b437c48c-f825-4a51-9076-acf5dcd25e36"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 09:53:40 crc kubenswrapper[4730]: I0930 09:53:40.614225 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd78d8d1-8617-4c3d-9205-abd8bbdde710-kube-api-access-84k94" (OuterVolumeSpecName: "kube-api-access-84k94") pod "cd78d8d1-8617-4c3d-9205-abd8bbdde710" (UID: "cd78d8d1-8617-4c3d-9205-abd8bbdde710"). InnerVolumeSpecName "kube-api-access-84k94". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 09:53:40 crc kubenswrapper[4730]: I0930 09:53:40.614340 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57333b74-aac2-4b1b-af18-e4dece554edb-kube-api-access-2bllk" (OuterVolumeSpecName: "kube-api-access-2bllk") pod "57333b74-aac2-4b1b-af18-e4dece554edb" (UID: "57333b74-aac2-4b1b-af18-e4dece554edb"). InnerVolumeSpecName "kube-api-access-2bllk". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 09:53:40 crc kubenswrapper[4730]: I0930 09:53:40.616301 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/297e4d9f-18c2-4e11-a89b-11df93cba4ef-kube-api-access-vsmsb" (OuterVolumeSpecName: "kube-api-access-vsmsb") pod "297e4d9f-18c2-4e11-a89b-11df93cba4ef" (UID: "297e4d9f-18c2-4e11-a89b-11df93cba4ef"). InnerVolumeSpecName "kube-api-access-vsmsb". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 09:53:40 crc kubenswrapper[4730]: I0930 09:53:40.619591 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9ea0a817-d369-46fc-9e35-ab227abcbf25-kube-api-access-q4xfm" (OuterVolumeSpecName: "kube-api-access-q4xfm") pod "9ea0a817-d369-46fc-9e35-ab227abcbf25" (UID: "9ea0a817-d369-46fc-9e35-ab227abcbf25"). InnerVolumeSpecName "kube-api-access-q4xfm". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 09:53:40 crc kubenswrapper[4730]: I0930 09:53:40.620250 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b437c48c-f825-4a51-9076-acf5dcd25e36-kube-api-access-cx7zs" (OuterVolumeSpecName: "kube-api-access-cx7zs") pod "b437c48c-f825-4a51-9076-acf5dcd25e36" (UID: "b437c48c-f825-4a51-9076-acf5dcd25e36"). InnerVolumeSpecName "kube-api-access-cx7zs". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 09:53:40 crc kubenswrapper[4730]: I0930 09:53:40.640264 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/297e4d9f-18c2-4e11-a89b-11df93cba4ef-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "297e4d9f-18c2-4e11-a89b-11df93cba4ef" (UID: "297e4d9f-18c2-4e11-a89b-11df93cba4ef"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 09:53:40 crc kubenswrapper[4730]: I0930 09:53:40.664532 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cd78d8d1-8617-4c3d-9205-abd8bbdde710-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "cd78d8d1-8617-4c3d-9205-abd8bbdde710" (UID: "cd78d8d1-8617-4c3d-9205-abd8bbdde710"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 09:53:40 crc kubenswrapper[4730]: I0930 09:53:40.666241 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9ea0a817-d369-46fc-9e35-ab227abcbf25-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9ea0a817-d369-46fc-9e35-ab227abcbf25" (UID: "9ea0a817-d369-46fc-9e35-ab227abcbf25"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 09:53:40 crc kubenswrapper[4730]: I0930 09:53:40.696458 4730 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b437c48c-f825-4a51-9076-acf5dcd25e36-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Sep 30 09:53:40 crc kubenswrapper[4730]: I0930 09:53:40.696499 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cx7zs\" (UniqueName: \"kubernetes.io/projected/b437c48c-f825-4a51-9076-acf5dcd25e36-kube-api-access-cx7zs\") on node \"crc\" DevicePath \"\"" Sep 30 09:53:40 crc kubenswrapper[4730]: I0930 09:53:40.696510 4730 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cd78d8d1-8617-4c3d-9205-abd8bbdde710-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 09:53:40 crc kubenswrapper[4730]: I0930 09:53:40.696524 4730 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cd78d8d1-8617-4c3d-9205-abd8bbdde710-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 09:53:40 crc kubenswrapper[4730]: I0930 09:53:40.696535 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2bllk\" (UniqueName: \"kubernetes.io/projected/57333b74-aac2-4b1b-af18-e4dece554edb-kube-api-access-2bllk\") on node \"crc\" DevicePath \"\"" Sep 30 09:53:40 crc kubenswrapper[4730]: I0930 09:53:40.696546 4730 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b437c48c-f825-4a51-9076-acf5dcd25e36-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Sep 30 09:53:40 crc kubenswrapper[4730]: I0930 09:53:40.696561 4730 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9ea0a817-d369-46fc-9e35-ab227abcbf25-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 09:53:40 crc kubenswrapper[4730]: I0930 09:53:40.696573 4730 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/297e4d9f-18c2-4e11-a89b-11df93cba4ef-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 09:53:40 crc kubenswrapper[4730]: I0930 09:53:40.696584 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-84k94\" (UniqueName: \"kubernetes.io/projected/cd78d8d1-8617-4c3d-9205-abd8bbdde710-kube-api-access-84k94\") on node \"crc\" DevicePath \"\"" Sep 30 09:53:40 crc kubenswrapper[4730]: I0930 09:53:40.696592 4730 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/297e4d9f-18c2-4e11-a89b-11df93cba4ef-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 09:53:40 crc kubenswrapper[4730]: I0930 09:53:40.696602 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q4xfm\" (UniqueName: \"kubernetes.io/projected/9ea0a817-d369-46fc-9e35-ab227abcbf25-kube-api-access-q4xfm\") on node \"crc\" DevicePath \"\"" Sep 30 09:53:40 crc kubenswrapper[4730]: I0930 09:53:40.696626 4730 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9ea0a817-d369-46fc-9e35-ab227abcbf25-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 09:53:40 crc kubenswrapper[4730]: I0930 09:53:40.696637 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vsmsb\" (UniqueName: 
\"kubernetes.io/projected/297e4d9f-18c2-4e11-a89b-11df93cba4ef-kube-api-access-vsmsb\") on node \"crc\" DevicePath \"\"" Sep 30 09:53:40 crc kubenswrapper[4730]: I0930 09:53:40.696649 4730 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57333b74-aac2-4b1b-af18-e4dece554edb-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 09:53:40 crc kubenswrapper[4730]: I0930 09:53:40.720721 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57333b74-aac2-4b1b-af18-e4dece554edb-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57333b74-aac2-4b1b-af18-e4dece554edb" (UID: "57333b74-aac2-4b1b-af18-e4dece554edb"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 09:53:40 crc kubenswrapper[4730]: I0930 09:53:40.775069 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-6gt5z"] Sep 30 09:53:40 crc kubenswrapper[4730]: I0930 09:53:40.798248 4730 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57333b74-aac2-4b1b-af18-e4dece554edb-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 09:53:40 crc kubenswrapper[4730]: I0930 09:53:40.969954 4730 generic.go:334] "Generic (PLEG): container finished" podID="297e4d9f-18c2-4e11-a89b-11df93cba4ef" containerID="076200e1ceb190f3029eab12693d4076afbdea446ff459c308e7de87ad3331f3" exitCode=0 Sep 30 09:53:40 crc kubenswrapper[4730]: I0930 09:53:40.970022 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8srlp" event={"ID":"297e4d9f-18c2-4e11-a89b-11df93cba4ef","Type":"ContainerDied","Data":"076200e1ceb190f3029eab12693d4076afbdea446ff459c308e7de87ad3331f3"} Sep 30 09:53:40 crc kubenswrapper[4730]: I0930 09:53:40.970052 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8srlp" event={"ID":"297e4d9f-18c2-4e11-a89b-11df93cba4ef","Type":"ContainerDied","Data":"9bcabb30039f75d4670167f4ca1a270216e2ba169fce17eaac84854f8118f33f"} Sep 30 09:53:40 crc kubenswrapper[4730]: I0930 09:53:40.970072 4730 scope.go:117] "RemoveContainer" containerID="076200e1ceb190f3029eab12693d4076afbdea446ff459c308e7de87ad3331f3" Sep 30 09:53:40 crc kubenswrapper[4730]: I0930 09:53:40.970220 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8srlp" Sep 30 09:53:40 crc kubenswrapper[4730]: I0930 09:53:40.972596 4730 generic.go:334] "Generic (PLEG): container finished" podID="b437c48c-f825-4a51-9076-acf5dcd25e36" containerID="67a0db591a5ac5da6e2caa2d9b76dd4067616304a38ee48c82db2ccbb8028ec5" exitCode=0 Sep 30 09:53:40 crc kubenswrapper[4730]: I0930 09:53:40.972651 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-nt7nq" event={"ID":"b437c48c-f825-4a51-9076-acf5dcd25e36","Type":"ContainerDied","Data":"67a0db591a5ac5da6e2caa2d9b76dd4067616304a38ee48c82db2ccbb8028ec5"} Sep 30 09:53:40 crc kubenswrapper[4730]: I0930 09:53:40.972690 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-nt7nq" Sep 30 09:53:40 crc kubenswrapper[4730]: I0930 09:53:40.972710 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-nt7nq" event={"ID":"b437c48c-f825-4a51-9076-acf5dcd25e36","Type":"ContainerDied","Data":"1327e7cc767626419db36e899ee267dd820eac7c4c930014bc47e025d0fd8b73"} Sep 30 09:53:40 crc kubenswrapper[4730]: I0930 09:53:40.974674 4730 generic.go:334] "Generic (PLEG): container finished" podID="9ea0a817-d369-46fc-9e35-ab227abcbf25" containerID="7a4872c1de0676f328202646734b87e601bf87a09db7059c0ce843233070c99d" exitCode=0 Sep 30 09:53:40 crc kubenswrapper[4730]: I0930 09:53:40.974732 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fnbq4" event={"ID":"9ea0a817-d369-46fc-9e35-ab227abcbf25","Type":"ContainerDied","Data":"7a4872c1de0676f328202646734b87e601bf87a09db7059c0ce843233070c99d"} Sep 30 09:53:40 crc kubenswrapper[4730]: I0930 09:53:40.974750 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fnbq4" event={"ID":"9ea0a817-d369-46fc-9e35-ab227abcbf25","Type":"ContainerDied","Data":"55a2e0adc3faa400b8585bf18bebd20608c22943fd3f66fc704ced8189781262"} Sep 30 09:53:40 crc kubenswrapper[4730]: I0930 09:53:40.974787 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-fnbq4" Sep 30 09:53:40 crc kubenswrapper[4730]: I0930 09:53:40.981460 4730 generic.go:334] "Generic (PLEG): container finished" podID="cd78d8d1-8617-4c3d-9205-abd8bbdde710" containerID="aa87a7828f88f9712358db9846d27c69240d9060ecb201c3a63761fa2827f7ef" exitCode=0 Sep 30 09:53:40 crc kubenswrapper[4730]: I0930 09:53:40.981515 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8qhvr" event={"ID":"cd78d8d1-8617-4c3d-9205-abd8bbdde710","Type":"ContainerDied","Data":"aa87a7828f88f9712358db9846d27c69240d9060ecb201c3a63761fa2827f7ef"} Sep 30 09:53:40 crc kubenswrapper[4730]: I0930 09:53:40.981543 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8qhvr" event={"ID":"cd78d8d1-8617-4c3d-9205-abd8bbdde710","Type":"ContainerDied","Data":"172d388c58b194e9773a4bcb28a549a1b5f2baa00a80bb6a7ce08a08df4a7ca1"} Sep 30 09:53:40 crc kubenswrapper[4730]: I0930 09:53:40.981655 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-8qhvr" Sep 30 09:53:40 crc kubenswrapper[4730]: I0930 09:53:40.987828 4730 scope.go:117] "RemoveContainer" containerID="2b249965378a3268bdf440753f9456189591ef744854c5a1637c9ed667704027" Sep 30 09:53:40 crc kubenswrapper[4730]: I0930 09:53:40.990731 4730 generic.go:334] "Generic (PLEG): container finished" podID="57333b74-aac2-4b1b-af18-e4dece554edb" containerID="ce3642118be30648d075448cc24aed002b9f6c61fba1bc57687097b763091ecc" exitCode=0 Sep 30 09:53:40 crc kubenswrapper[4730]: I0930 09:53:40.990800 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5g8hq" event={"ID":"57333b74-aac2-4b1b-af18-e4dece554edb","Type":"ContainerDied","Data":"ce3642118be30648d075448cc24aed002b9f6c61fba1bc57687097b763091ecc"} Sep 30 09:53:40 crc kubenswrapper[4730]: I0930 09:53:40.990830 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5g8hq" event={"ID":"57333b74-aac2-4b1b-af18-e4dece554edb","Type":"ContainerDied","Data":"a5f37e8709017513fa7dcca88ef358f0f473cbed1ad8964a5d7055fcaa2f0a13"} Sep 30 09:53:40 crc kubenswrapper[4730]: I0930 09:53:40.990833 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-5g8hq" Sep 30 09:53:40 crc kubenswrapper[4730]: I0930 09:53:40.992908 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-6gt5z" event={"ID":"a28d8376-b2f6-44da-b872-34bd96b74108","Type":"ContainerStarted","Data":"5617f83f1cd80aec0a1f6597816c86d84fda81cceb486de989469576db2ab7b7"} Sep 30 09:53:40 crc kubenswrapper[4730]: I0930 09:53:40.992947 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-6gt5z" event={"ID":"a28d8376-b2f6-44da-b872-34bd96b74108","Type":"ContainerStarted","Data":"47c0496b44c6667e9238cf2e5f1e422876a5a3c9eecab99d4dfbf297cdfe4b05"} Sep 30 09:53:40 crc kubenswrapper[4730]: I0930 09:53:40.993575 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-6gt5z" Sep 30 09:53:40 crc kubenswrapper[4730]: I0930 09:53:40.994487 4730 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-6gt5z container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.55:8080/healthz\": dial tcp 10.217.0.55:8080: connect: connection refused" start-of-body= Sep 30 09:53:40 crc kubenswrapper[4730]: I0930 09:53:40.994533 4730 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-6gt5z" podUID="a28d8376-b2f6-44da-b872-34bd96b74108" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.55:8080/healthz\": dial tcp 10.217.0.55:8080: connect: connection refused" Sep 30 09:53:41 crc kubenswrapper[4730]: I0930 09:53:41.015128 4730 scope.go:117] "RemoveContainer" containerID="07663d8f06a233839c7a81e64fc3b252e93dce3596ef18b41c0d0b263ffe348a" Sep 30 09:53:41 crc kubenswrapper[4730]: I0930 09:53:41.015170 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-6gt5z" podStartSLOduration=2.015138471 podStartE2EDuration="2.015138471s" podCreationTimestamp="2025-09-30 09:53:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 
00:00:00 +0000 UTC" observedRunningTime="2025-09-30 09:53:41.011927664 +0000 UTC m=+265.345187707" watchObservedRunningTime="2025-09-30 09:53:41.015138471 +0000 UTC m=+265.348398464" Sep 30 09:53:41 crc kubenswrapper[4730]: I0930 09:53:41.040922 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-nt7nq"] Sep 30 09:53:41 crc kubenswrapper[4730]: I0930 09:53:41.041705 4730 scope.go:117] "RemoveContainer" containerID="076200e1ceb190f3029eab12693d4076afbdea446ff459c308e7de87ad3331f3" Sep 30 09:53:41 crc kubenswrapper[4730]: E0930 09:53:41.042489 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"076200e1ceb190f3029eab12693d4076afbdea446ff459c308e7de87ad3331f3\": container with ID starting with 076200e1ceb190f3029eab12693d4076afbdea446ff459c308e7de87ad3331f3 not found: ID does not exist" containerID="076200e1ceb190f3029eab12693d4076afbdea446ff459c308e7de87ad3331f3" Sep 30 09:53:41 crc kubenswrapper[4730]: I0930 09:53:41.042527 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"076200e1ceb190f3029eab12693d4076afbdea446ff459c308e7de87ad3331f3"} err="failed to get container status \"076200e1ceb190f3029eab12693d4076afbdea446ff459c308e7de87ad3331f3\": rpc error: code = NotFound desc = could not find container \"076200e1ceb190f3029eab12693d4076afbdea446ff459c308e7de87ad3331f3\": container with ID starting with 076200e1ceb190f3029eab12693d4076afbdea446ff459c308e7de87ad3331f3 not found: ID does not exist" Sep 30 09:53:41 crc kubenswrapper[4730]: I0930 09:53:41.042554 4730 scope.go:117] "RemoveContainer" containerID="2b249965378a3268bdf440753f9456189591ef744854c5a1637c9ed667704027" Sep 30 09:53:41 crc kubenswrapper[4730]: E0930 09:53:41.044802 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2b249965378a3268bdf440753f9456189591ef744854c5a1637c9ed667704027\": container with ID starting with 2b249965378a3268bdf440753f9456189591ef744854c5a1637c9ed667704027 not found: ID does not exist" containerID="2b249965378a3268bdf440753f9456189591ef744854c5a1637c9ed667704027" Sep 30 09:53:41 crc kubenswrapper[4730]: I0930 09:53:41.044831 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2b249965378a3268bdf440753f9456189591ef744854c5a1637c9ed667704027"} err="failed to get container status \"2b249965378a3268bdf440753f9456189591ef744854c5a1637c9ed667704027\": rpc error: code = NotFound desc = could not find container \"2b249965378a3268bdf440753f9456189591ef744854c5a1637c9ed667704027\": container with ID starting with 2b249965378a3268bdf440753f9456189591ef744854c5a1637c9ed667704027 not found: ID does not exist" Sep 30 09:53:41 crc kubenswrapper[4730]: I0930 09:53:41.044844 4730 scope.go:117] "RemoveContainer" containerID="07663d8f06a233839c7a81e64fc3b252e93dce3596ef18b41c0d0b263ffe348a" Sep 30 09:53:41 crc kubenswrapper[4730]: E0930 09:53:41.045845 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"07663d8f06a233839c7a81e64fc3b252e93dce3596ef18b41c0d0b263ffe348a\": container with ID starting with 07663d8f06a233839c7a81e64fc3b252e93dce3596ef18b41c0d0b263ffe348a not found: ID does not exist" containerID="07663d8f06a233839c7a81e64fc3b252e93dce3596ef18b41c0d0b263ffe348a" Sep 30 09:53:41 crc kubenswrapper[4730]: I0930 09:53:41.045885 4730 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"07663d8f06a233839c7a81e64fc3b252e93dce3596ef18b41c0d0b263ffe348a"} err="failed to get container status \"07663d8f06a233839c7a81e64fc3b252e93dce3596ef18b41c0d0b263ffe348a\": rpc error: code = NotFound desc = could not find container \"07663d8f06a233839c7a81e64fc3b252e93dce3596ef18b41c0d0b263ffe348a\": container with ID starting with 07663d8f06a233839c7a81e64fc3b252e93dce3596ef18b41c0d0b263ffe348a not found: ID does not exist" Sep 30 09:53:41 crc kubenswrapper[4730]: I0930 09:53:41.045906 4730 scope.go:117] "RemoveContainer" containerID="67a0db591a5ac5da6e2caa2d9b76dd4067616304a38ee48c82db2ccbb8028ec5" Sep 30 09:53:41 crc kubenswrapper[4730]: I0930 09:53:41.047461 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-nt7nq"] Sep 30 09:53:41 crc kubenswrapper[4730]: I0930 09:53:41.053070 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-fnbq4"] Sep 30 09:53:41 crc kubenswrapper[4730]: I0930 09:53:41.060107 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-fnbq4"] Sep 30 09:53:41 crc kubenswrapper[4730]: I0930 09:53:41.061601 4730 scope.go:117] "RemoveContainer" containerID="67a0db591a5ac5da6e2caa2d9b76dd4067616304a38ee48c82db2ccbb8028ec5" Sep 30 09:53:41 crc kubenswrapper[4730]: E0930 09:53:41.062421 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"67a0db591a5ac5da6e2caa2d9b76dd4067616304a38ee48c82db2ccbb8028ec5\": container with ID starting with 67a0db591a5ac5da6e2caa2d9b76dd4067616304a38ee48c82db2ccbb8028ec5 not found: ID does not exist" containerID="67a0db591a5ac5da6e2caa2d9b76dd4067616304a38ee48c82db2ccbb8028ec5" Sep 30 09:53:41 crc kubenswrapper[4730]: I0930 09:53:41.062476 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"67a0db591a5ac5da6e2caa2d9b76dd4067616304a38ee48c82db2ccbb8028ec5"} err="failed to get container status \"67a0db591a5ac5da6e2caa2d9b76dd4067616304a38ee48c82db2ccbb8028ec5\": rpc error: code = NotFound desc = could not find container \"67a0db591a5ac5da6e2caa2d9b76dd4067616304a38ee48c82db2ccbb8028ec5\": container with ID starting with 67a0db591a5ac5da6e2caa2d9b76dd4067616304a38ee48c82db2ccbb8028ec5 not found: ID does not exist" Sep 30 09:53:41 crc kubenswrapper[4730]: I0930 09:53:41.062509 4730 scope.go:117] "RemoveContainer" containerID="7a4872c1de0676f328202646734b87e601bf87a09db7059c0ce843233070c99d" Sep 30 09:53:41 crc kubenswrapper[4730]: I0930 09:53:41.065227 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-8qhvr"] Sep 30 09:53:41 crc kubenswrapper[4730]: I0930 09:53:41.077101 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-8qhvr"] Sep 30 09:53:41 crc kubenswrapper[4730]: I0930 09:53:41.079428 4730 scope.go:117] "RemoveContainer" containerID="111dfe08a8f5e9afa79eadf369cdea9b1d6a0d7b6ad7f96191c0e34c5aa149fb" Sep 30 09:53:41 crc kubenswrapper[4730]: I0930 09:53:41.079807 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-8srlp"] Sep 30 09:53:41 crc kubenswrapper[4730]: I0930 09:53:41.088421 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-8srlp"] Sep 30 09:53:41 crc 
kubenswrapper[4730]: I0930 09:53:41.092958 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-5g8hq"] Sep 30 09:53:41 crc kubenswrapper[4730]: I0930 09:53:41.101689 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-5g8hq"] Sep 30 09:53:41 crc kubenswrapper[4730]: I0930 09:53:41.129409 4730 scope.go:117] "RemoveContainer" containerID="776418ea5720644edcbda23ad765d15fecd354331babf1927c2b8d2076015452" Sep 30 09:53:41 crc kubenswrapper[4730]: I0930 09:53:41.147799 4730 scope.go:117] "RemoveContainer" containerID="7a4872c1de0676f328202646734b87e601bf87a09db7059c0ce843233070c99d" Sep 30 09:53:41 crc kubenswrapper[4730]: E0930 09:53:41.148554 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7a4872c1de0676f328202646734b87e601bf87a09db7059c0ce843233070c99d\": container with ID starting with 7a4872c1de0676f328202646734b87e601bf87a09db7059c0ce843233070c99d not found: ID does not exist" containerID="7a4872c1de0676f328202646734b87e601bf87a09db7059c0ce843233070c99d" Sep 30 09:53:41 crc kubenswrapper[4730]: I0930 09:53:41.148645 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7a4872c1de0676f328202646734b87e601bf87a09db7059c0ce843233070c99d"} err="failed to get container status \"7a4872c1de0676f328202646734b87e601bf87a09db7059c0ce843233070c99d\": rpc error: code = NotFound desc = could not find container \"7a4872c1de0676f328202646734b87e601bf87a09db7059c0ce843233070c99d\": container with ID starting with 7a4872c1de0676f328202646734b87e601bf87a09db7059c0ce843233070c99d not found: ID does not exist" Sep 30 09:53:41 crc kubenswrapper[4730]: I0930 09:53:41.148701 4730 scope.go:117] "RemoveContainer" containerID="111dfe08a8f5e9afa79eadf369cdea9b1d6a0d7b6ad7f96191c0e34c5aa149fb" Sep 30 09:53:41 crc kubenswrapper[4730]: E0930 09:53:41.149786 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"111dfe08a8f5e9afa79eadf369cdea9b1d6a0d7b6ad7f96191c0e34c5aa149fb\": container with ID starting with 111dfe08a8f5e9afa79eadf369cdea9b1d6a0d7b6ad7f96191c0e34c5aa149fb not found: ID does not exist" containerID="111dfe08a8f5e9afa79eadf369cdea9b1d6a0d7b6ad7f96191c0e34c5aa149fb" Sep 30 09:53:41 crc kubenswrapper[4730]: I0930 09:53:41.149828 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"111dfe08a8f5e9afa79eadf369cdea9b1d6a0d7b6ad7f96191c0e34c5aa149fb"} err="failed to get container status \"111dfe08a8f5e9afa79eadf369cdea9b1d6a0d7b6ad7f96191c0e34c5aa149fb\": rpc error: code = NotFound desc = could not find container \"111dfe08a8f5e9afa79eadf369cdea9b1d6a0d7b6ad7f96191c0e34c5aa149fb\": container with ID starting with 111dfe08a8f5e9afa79eadf369cdea9b1d6a0d7b6ad7f96191c0e34c5aa149fb not found: ID does not exist" Sep 30 09:53:41 crc kubenswrapper[4730]: I0930 09:53:41.149857 4730 scope.go:117] "RemoveContainer" containerID="776418ea5720644edcbda23ad765d15fecd354331babf1927c2b8d2076015452" Sep 30 09:53:41 crc kubenswrapper[4730]: E0930 09:53:41.150246 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"776418ea5720644edcbda23ad765d15fecd354331babf1927c2b8d2076015452\": container with ID starting with 776418ea5720644edcbda23ad765d15fecd354331babf1927c2b8d2076015452 not found: ID does not exist" 
containerID="776418ea5720644edcbda23ad765d15fecd354331babf1927c2b8d2076015452" Sep 30 09:53:41 crc kubenswrapper[4730]: I0930 09:53:41.150293 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"776418ea5720644edcbda23ad765d15fecd354331babf1927c2b8d2076015452"} err="failed to get container status \"776418ea5720644edcbda23ad765d15fecd354331babf1927c2b8d2076015452\": rpc error: code = NotFound desc = could not find container \"776418ea5720644edcbda23ad765d15fecd354331babf1927c2b8d2076015452\": container with ID starting with 776418ea5720644edcbda23ad765d15fecd354331babf1927c2b8d2076015452 not found: ID does not exist" Sep 30 09:53:41 crc kubenswrapper[4730]: I0930 09:53:41.150322 4730 scope.go:117] "RemoveContainer" containerID="aa87a7828f88f9712358db9846d27c69240d9060ecb201c3a63761fa2827f7ef" Sep 30 09:53:41 crc kubenswrapper[4730]: I0930 09:53:41.174978 4730 scope.go:117] "RemoveContainer" containerID="038740d5913ebafff9735e66423a0b5a4547372d92a89e51d2a6ead3783ee6f1" Sep 30 09:53:41 crc kubenswrapper[4730]: I0930 09:53:41.202875 4730 scope.go:117] "RemoveContainer" containerID="6c116a99bd8ab138907566ca989fb722795edd4380180a47ae5b42370c0593c6" Sep 30 09:53:41 crc kubenswrapper[4730]: I0930 09:53:41.222335 4730 scope.go:117] "RemoveContainer" containerID="aa87a7828f88f9712358db9846d27c69240d9060ecb201c3a63761fa2827f7ef" Sep 30 09:53:41 crc kubenswrapper[4730]: E0930 09:53:41.223065 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aa87a7828f88f9712358db9846d27c69240d9060ecb201c3a63761fa2827f7ef\": container with ID starting with aa87a7828f88f9712358db9846d27c69240d9060ecb201c3a63761fa2827f7ef not found: ID does not exist" containerID="aa87a7828f88f9712358db9846d27c69240d9060ecb201c3a63761fa2827f7ef" Sep 30 09:53:41 crc kubenswrapper[4730]: I0930 09:53:41.223126 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aa87a7828f88f9712358db9846d27c69240d9060ecb201c3a63761fa2827f7ef"} err="failed to get container status \"aa87a7828f88f9712358db9846d27c69240d9060ecb201c3a63761fa2827f7ef\": rpc error: code = NotFound desc = could not find container \"aa87a7828f88f9712358db9846d27c69240d9060ecb201c3a63761fa2827f7ef\": container with ID starting with aa87a7828f88f9712358db9846d27c69240d9060ecb201c3a63761fa2827f7ef not found: ID does not exist" Sep 30 09:53:41 crc kubenswrapper[4730]: I0930 09:53:41.223167 4730 scope.go:117] "RemoveContainer" containerID="038740d5913ebafff9735e66423a0b5a4547372d92a89e51d2a6ead3783ee6f1" Sep 30 09:53:41 crc kubenswrapper[4730]: E0930 09:53:41.224148 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"038740d5913ebafff9735e66423a0b5a4547372d92a89e51d2a6ead3783ee6f1\": container with ID starting with 038740d5913ebafff9735e66423a0b5a4547372d92a89e51d2a6ead3783ee6f1 not found: ID does not exist" containerID="038740d5913ebafff9735e66423a0b5a4547372d92a89e51d2a6ead3783ee6f1" Sep 30 09:53:41 crc kubenswrapper[4730]: I0930 09:53:41.224216 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"038740d5913ebafff9735e66423a0b5a4547372d92a89e51d2a6ead3783ee6f1"} err="failed to get container status \"038740d5913ebafff9735e66423a0b5a4547372d92a89e51d2a6ead3783ee6f1\": rpc error: code = NotFound desc = could not find container 
\"038740d5913ebafff9735e66423a0b5a4547372d92a89e51d2a6ead3783ee6f1\": container with ID starting with 038740d5913ebafff9735e66423a0b5a4547372d92a89e51d2a6ead3783ee6f1 not found: ID does not exist" Sep 30 09:53:41 crc kubenswrapper[4730]: I0930 09:53:41.224249 4730 scope.go:117] "RemoveContainer" containerID="6c116a99bd8ab138907566ca989fb722795edd4380180a47ae5b42370c0593c6" Sep 30 09:53:41 crc kubenswrapper[4730]: E0930 09:53:41.225088 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6c116a99bd8ab138907566ca989fb722795edd4380180a47ae5b42370c0593c6\": container with ID starting with 6c116a99bd8ab138907566ca989fb722795edd4380180a47ae5b42370c0593c6 not found: ID does not exist" containerID="6c116a99bd8ab138907566ca989fb722795edd4380180a47ae5b42370c0593c6" Sep 30 09:53:41 crc kubenswrapper[4730]: I0930 09:53:41.225120 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6c116a99bd8ab138907566ca989fb722795edd4380180a47ae5b42370c0593c6"} err="failed to get container status \"6c116a99bd8ab138907566ca989fb722795edd4380180a47ae5b42370c0593c6\": rpc error: code = NotFound desc = could not find container \"6c116a99bd8ab138907566ca989fb722795edd4380180a47ae5b42370c0593c6\": container with ID starting with 6c116a99bd8ab138907566ca989fb722795edd4380180a47ae5b42370c0593c6 not found: ID does not exist" Sep 30 09:53:41 crc kubenswrapper[4730]: I0930 09:53:41.225139 4730 scope.go:117] "RemoveContainer" containerID="ce3642118be30648d075448cc24aed002b9f6c61fba1bc57687097b763091ecc" Sep 30 09:53:41 crc kubenswrapper[4730]: I0930 09:53:41.243112 4730 scope.go:117] "RemoveContainer" containerID="023ff43605a0395a68aaa779ba5dc415d6df8a3a8d37858e4705dfaaca9dc805" Sep 30 09:53:41 crc kubenswrapper[4730]: I0930 09:53:41.265219 4730 scope.go:117] "RemoveContainer" containerID="21ff7005bcd1bccc40ef59c3aa64dacb5aaa02cbc62008dfcd39aed9feb4d949" Sep 30 09:53:41 crc kubenswrapper[4730]: I0930 09:53:41.283817 4730 scope.go:117] "RemoveContainer" containerID="ce3642118be30648d075448cc24aed002b9f6c61fba1bc57687097b763091ecc" Sep 30 09:53:41 crc kubenswrapper[4730]: E0930 09:53:41.284657 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ce3642118be30648d075448cc24aed002b9f6c61fba1bc57687097b763091ecc\": container with ID starting with ce3642118be30648d075448cc24aed002b9f6c61fba1bc57687097b763091ecc not found: ID does not exist" containerID="ce3642118be30648d075448cc24aed002b9f6c61fba1bc57687097b763091ecc" Sep 30 09:53:41 crc kubenswrapper[4730]: I0930 09:53:41.284733 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ce3642118be30648d075448cc24aed002b9f6c61fba1bc57687097b763091ecc"} err="failed to get container status \"ce3642118be30648d075448cc24aed002b9f6c61fba1bc57687097b763091ecc\": rpc error: code = NotFound desc = could not find container \"ce3642118be30648d075448cc24aed002b9f6c61fba1bc57687097b763091ecc\": container with ID starting with ce3642118be30648d075448cc24aed002b9f6c61fba1bc57687097b763091ecc not found: ID does not exist" Sep 30 09:53:41 crc kubenswrapper[4730]: I0930 09:53:41.284783 4730 scope.go:117] "RemoveContainer" containerID="023ff43605a0395a68aaa779ba5dc415d6df8a3a8d37858e4705dfaaca9dc805" Sep 30 09:53:41 crc kubenswrapper[4730]: E0930 09:53:41.285229 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = 
could not find container \"023ff43605a0395a68aaa779ba5dc415d6df8a3a8d37858e4705dfaaca9dc805\": container with ID starting with 023ff43605a0395a68aaa779ba5dc415d6df8a3a8d37858e4705dfaaca9dc805 not found: ID does not exist" containerID="023ff43605a0395a68aaa779ba5dc415d6df8a3a8d37858e4705dfaaca9dc805" Sep 30 09:53:41 crc kubenswrapper[4730]: I0930 09:53:41.285273 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"023ff43605a0395a68aaa779ba5dc415d6df8a3a8d37858e4705dfaaca9dc805"} err="failed to get container status \"023ff43605a0395a68aaa779ba5dc415d6df8a3a8d37858e4705dfaaca9dc805\": rpc error: code = NotFound desc = could not find container \"023ff43605a0395a68aaa779ba5dc415d6df8a3a8d37858e4705dfaaca9dc805\": container with ID starting with 023ff43605a0395a68aaa779ba5dc415d6df8a3a8d37858e4705dfaaca9dc805 not found: ID does not exist" Sep 30 09:53:41 crc kubenswrapper[4730]: I0930 09:53:41.285302 4730 scope.go:117] "RemoveContainer" containerID="21ff7005bcd1bccc40ef59c3aa64dacb5aaa02cbc62008dfcd39aed9feb4d949" Sep 30 09:53:41 crc kubenswrapper[4730]: E0930 09:53:41.285777 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"21ff7005bcd1bccc40ef59c3aa64dacb5aaa02cbc62008dfcd39aed9feb4d949\": container with ID starting with 21ff7005bcd1bccc40ef59c3aa64dacb5aaa02cbc62008dfcd39aed9feb4d949 not found: ID does not exist" containerID="21ff7005bcd1bccc40ef59c3aa64dacb5aaa02cbc62008dfcd39aed9feb4d949" Sep 30 09:53:41 crc kubenswrapper[4730]: I0930 09:53:41.285879 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"21ff7005bcd1bccc40ef59c3aa64dacb5aaa02cbc62008dfcd39aed9feb4d949"} err="failed to get container status \"21ff7005bcd1bccc40ef59c3aa64dacb5aaa02cbc62008dfcd39aed9feb4d949\": rpc error: code = NotFound desc = could not find container \"21ff7005bcd1bccc40ef59c3aa64dacb5aaa02cbc62008dfcd39aed9feb4d949\": container with ID starting with 21ff7005bcd1bccc40ef59c3aa64dacb5aaa02cbc62008dfcd39aed9feb4d949 not found: ID does not exist" Sep 30 09:53:42 crc kubenswrapper[4730]: I0930 09:53:42.012406 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-6gt5z" Sep 30 09:53:42 crc kubenswrapper[4730]: I0930 09:53:42.132174 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-xgf65"] Sep 30 09:53:42 crc kubenswrapper[4730]: E0930 09:53:42.132400 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="57333b74-aac2-4b1b-af18-e4dece554edb" containerName="extract-content" Sep 30 09:53:42 crc kubenswrapper[4730]: I0930 09:53:42.132416 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="57333b74-aac2-4b1b-af18-e4dece554edb" containerName="extract-content" Sep 30 09:53:42 crc kubenswrapper[4730]: E0930 09:53:42.132423 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b437c48c-f825-4a51-9076-acf5dcd25e36" containerName="marketplace-operator" Sep 30 09:53:42 crc kubenswrapper[4730]: I0930 09:53:42.132429 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="b437c48c-f825-4a51-9076-acf5dcd25e36" containerName="marketplace-operator" Sep 30 09:53:42 crc kubenswrapper[4730]: E0930 09:53:42.132437 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="57333b74-aac2-4b1b-af18-e4dece554edb" containerName="registry-server" Sep 30 09:53:42 crc kubenswrapper[4730]: 
I0930 09:53:42.132443 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="57333b74-aac2-4b1b-af18-e4dece554edb" containerName="registry-server" Sep 30 09:53:42 crc kubenswrapper[4730]: E0930 09:53:42.132451 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9ea0a817-d369-46fc-9e35-ab227abcbf25" containerName="registry-server" Sep 30 09:53:42 crc kubenswrapper[4730]: I0930 09:53:42.132456 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="9ea0a817-d369-46fc-9e35-ab227abcbf25" containerName="registry-server" Sep 30 09:53:42 crc kubenswrapper[4730]: E0930 09:53:42.132465 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="57333b74-aac2-4b1b-af18-e4dece554edb" containerName="extract-utilities" Sep 30 09:53:42 crc kubenswrapper[4730]: I0930 09:53:42.132471 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="57333b74-aac2-4b1b-af18-e4dece554edb" containerName="extract-utilities" Sep 30 09:53:42 crc kubenswrapper[4730]: E0930 09:53:42.132482 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cd78d8d1-8617-4c3d-9205-abd8bbdde710" containerName="extract-content" Sep 30 09:53:42 crc kubenswrapper[4730]: I0930 09:53:42.132487 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="cd78d8d1-8617-4c3d-9205-abd8bbdde710" containerName="extract-content" Sep 30 09:53:42 crc kubenswrapper[4730]: E0930 09:53:42.132497 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="297e4d9f-18c2-4e11-a89b-11df93cba4ef" containerName="extract-utilities" Sep 30 09:53:42 crc kubenswrapper[4730]: I0930 09:53:42.132502 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="297e4d9f-18c2-4e11-a89b-11df93cba4ef" containerName="extract-utilities" Sep 30 09:53:42 crc kubenswrapper[4730]: E0930 09:53:42.132510 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9ea0a817-d369-46fc-9e35-ab227abcbf25" containerName="extract-content" Sep 30 09:53:42 crc kubenswrapper[4730]: I0930 09:53:42.132517 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="9ea0a817-d369-46fc-9e35-ab227abcbf25" containerName="extract-content" Sep 30 09:53:42 crc kubenswrapper[4730]: E0930 09:53:42.132525 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cd78d8d1-8617-4c3d-9205-abd8bbdde710" containerName="extract-utilities" Sep 30 09:53:42 crc kubenswrapper[4730]: I0930 09:53:42.132531 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="cd78d8d1-8617-4c3d-9205-abd8bbdde710" containerName="extract-utilities" Sep 30 09:53:42 crc kubenswrapper[4730]: E0930 09:53:42.132539 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="297e4d9f-18c2-4e11-a89b-11df93cba4ef" containerName="extract-content" Sep 30 09:53:42 crc kubenswrapper[4730]: I0930 09:53:42.132544 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="297e4d9f-18c2-4e11-a89b-11df93cba4ef" containerName="extract-content" Sep 30 09:53:42 crc kubenswrapper[4730]: E0930 09:53:42.132555 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cd78d8d1-8617-4c3d-9205-abd8bbdde710" containerName="registry-server" Sep 30 09:53:42 crc kubenswrapper[4730]: I0930 09:53:42.132560 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="cd78d8d1-8617-4c3d-9205-abd8bbdde710" containerName="registry-server" Sep 30 09:53:42 crc kubenswrapper[4730]: E0930 09:53:42.132570 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9ea0a817-d369-46fc-9e35-ab227abcbf25" containerName="extract-utilities" Sep 30 
09:53:42 crc kubenswrapper[4730]: I0930 09:53:42.132581 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="9ea0a817-d369-46fc-9e35-ab227abcbf25" containerName="extract-utilities" Sep 30 09:53:42 crc kubenswrapper[4730]: E0930 09:53:42.132597 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="297e4d9f-18c2-4e11-a89b-11df93cba4ef" containerName="registry-server" Sep 30 09:53:42 crc kubenswrapper[4730]: I0930 09:53:42.132603 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="297e4d9f-18c2-4e11-a89b-11df93cba4ef" containerName="registry-server" Sep 30 09:53:42 crc kubenswrapper[4730]: I0930 09:53:42.132714 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="57333b74-aac2-4b1b-af18-e4dece554edb" containerName="registry-server" Sep 30 09:53:42 crc kubenswrapper[4730]: I0930 09:53:42.132724 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="cd78d8d1-8617-4c3d-9205-abd8bbdde710" containerName="registry-server" Sep 30 09:53:42 crc kubenswrapper[4730]: I0930 09:53:42.132734 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="b437c48c-f825-4a51-9076-acf5dcd25e36" containerName="marketplace-operator" Sep 30 09:53:42 crc kubenswrapper[4730]: I0930 09:53:42.132743 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="297e4d9f-18c2-4e11-a89b-11df93cba4ef" containerName="registry-server" Sep 30 09:53:42 crc kubenswrapper[4730]: I0930 09:53:42.132752 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="9ea0a817-d369-46fc-9e35-ab227abcbf25" containerName="registry-server" Sep 30 09:53:42 crc kubenswrapper[4730]: I0930 09:53:42.133554 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xgf65" Sep 30 09:53:42 crc kubenswrapper[4730]: I0930 09:53:42.136151 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Sep 30 09:53:42 crc kubenswrapper[4730]: I0930 09:53:42.153027 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-xgf65"] Sep 30 09:53:42 crc kubenswrapper[4730]: I0930 09:53:42.213967 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c86cq\" (UniqueName: \"kubernetes.io/projected/01e08669-831d-492e-b8eb-302fac0ca54a-kube-api-access-c86cq\") pod \"redhat-marketplace-xgf65\" (UID: \"01e08669-831d-492e-b8eb-302fac0ca54a\") " pod="openshift-marketplace/redhat-marketplace-xgf65" Sep 30 09:53:42 crc kubenswrapper[4730]: I0930 09:53:42.214085 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/01e08669-831d-492e-b8eb-302fac0ca54a-catalog-content\") pod \"redhat-marketplace-xgf65\" (UID: \"01e08669-831d-492e-b8eb-302fac0ca54a\") " pod="openshift-marketplace/redhat-marketplace-xgf65" Sep 30 09:53:42 crc kubenswrapper[4730]: I0930 09:53:42.214112 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/01e08669-831d-492e-b8eb-302fac0ca54a-utilities\") pod \"redhat-marketplace-xgf65\" (UID: \"01e08669-831d-492e-b8eb-302fac0ca54a\") " pod="openshift-marketplace/redhat-marketplace-xgf65" Sep 30 09:53:42 crc kubenswrapper[4730]: I0930 09:53:42.315381 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
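
The cpu_manager.go / state_mem.go / memory_manager.go burst above is the other half of that cleanup: when the replacement catalog pods are admitted, the resource managers scrub checkpointed per-container assignments belonging to the deleted pod UIDs. The E-severity "RemoveStaleState: removing container" lines are cosmetic on this single-node cluster, since with the default "none" CPU-manager policy there is no exclusive CPU set to return. A rough sketch of the bookkeeping, using hypothetical types rather than the kubelet's state package:

```go
package main

import "fmt"

// assignments models checkpointed resource-manager state:
// pod UID -> container name -> assigned CPU set (a string here for brevity).
type assignments map[string]map[string]string

// removeStaleState drops entries whose pod UID the kubelet no longer
// knows, mirroring the paired "RemoveStaleState: removing container" /
// "Deleted CPUSet assignment" messages above.
func removeStaleState(state assignments, livePods map[string]bool) {
	for podUID, containers := range state {
		if livePods[podUID] {
			continue
		}
		for name := range containers {
			fmt.Printf("RemoveStaleState: removing container podUID=%q containerName=%q\n", podUID, name)
		}
		delete(state, podUID)
	}
}

func main() {
	state := assignments{
		"57333b74": {"extract-content": "0-1", "registry-server": "2"},
		"live-pod": {"app": "3"},
	}
	removeStaleState(state, map[string]bool{"live-pod": true})
	fmt.Println(state) // only live-pod's assignment survives
}
```
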
\"kube-api-access-c86cq\" (UniqueName: \"kubernetes.io/projected/01e08669-831d-492e-b8eb-302fac0ca54a-kube-api-access-c86cq\") pod \"redhat-marketplace-xgf65\" (UID: \"01e08669-831d-492e-b8eb-302fac0ca54a\") " pod="openshift-marketplace/redhat-marketplace-xgf65" Sep 30 09:53:42 crc kubenswrapper[4730]: I0930 09:53:42.315462 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/01e08669-831d-492e-b8eb-302fac0ca54a-catalog-content\") pod \"redhat-marketplace-xgf65\" (UID: \"01e08669-831d-492e-b8eb-302fac0ca54a\") " pod="openshift-marketplace/redhat-marketplace-xgf65" Sep 30 09:53:42 crc kubenswrapper[4730]: I0930 09:53:42.315478 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/01e08669-831d-492e-b8eb-302fac0ca54a-utilities\") pod \"redhat-marketplace-xgf65\" (UID: \"01e08669-831d-492e-b8eb-302fac0ca54a\") " pod="openshift-marketplace/redhat-marketplace-xgf65" Sep 30 09:53:42 crc kubenswrapper[4730]: I0930 09:53:42.315968 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/01e08669-831d-492e-b8eb-302fac0ca54a-catalog-content\") pod \"redhat-marketplace-xgf65\" (UID: \"01e08669-831d-492e-b8eb-302fac0ca54a\") " pod="openshift-marketplace/redhat-marketplace-xgf65" Sep 30 09:53:42 crc kubenswrapper[4730]: I0930 09:53:42.315991 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/01e08669-831d-492e-b8eb-302fac0ca54a-utilities\") pod \"redhat-marketplace-xgf65\" (UID: \"01e08669-831d-492e-b8eb-302fac0ca54a\") " pod="openshift-marketplace/redhat-marketplace-xgf65" Sep 30 09:53:42 crc kubenswrapper[4730]: I0930 09:53:42.331757 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-njvdw"] Sep 30 09:53:42 crc kubenswrapper[4730]: I0930 09:53:42.332942 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-njvdw" Sep 30 09:53:42 crc kubenswrapper[4730]: I0930 09:53:42.335353 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Sep 30 09:53:42 crc kubenswrapper[4730]: I0930 09:53:42.341016 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-njvdw"] Sep 30 09:53:42 crc kubenswrapper[4730]: I0930 09:53:42.346438 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c86cq\" (UniqueName: \"kubernetes.io/projected/01e08669-831d-492e-b8eb-302fac0ca54a-kube-api-access-c86cq\") pod \"redhat-marketplace-xgf65\" (UID: \"01e08669-831d-492e-b8eb-302fac0ca54a\") " pod="openshift-marketplace/redhat-marketplace-xgf65" Sep 30 09:53:42 crc kubenswrapper[4730]: I0930 09:53:42.387993 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="297e4d9f-18c2-4e11-a89b-11df93cba4ef" path="/var/lib/kubelet/pods/297e4d9f-18c2-4e11-a89b-11df93cba4ef/volumes" Sep 30 09:53:42 crc kubenswrapper[4730]: I0930 09:53:42.388721 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57333b74-aac2-4b1b-af18-e4dece554edb" path="/var/lib/kubelet/pods/57333b74-aac2-4b1b-af18-e4dece554edb/volumes" Sep 30 09:53:42 crc kubenswrapper[4730]: I0930 09:53:42.389351 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9ea0a817-d369-46fc-9e35-ab227abcbf25" path="/var/lib/kubelet/pods/9ea0a817-d369-46fc-9e35-ab227abcbf25/volumes" Sep 30 09:53:42 crc kubenswrapper[4730]: I0930 09:53:42.390442 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b437c48c-f825-4a51-9076-acf5dcd25e36" path="/var/lib/kubelet/pods/b437c48c-f825-4a51-9076-acf5dcd25e36/volumes" Sep 30 09:53:42 crc kubenswrapper[4730]: I0930 09:53:42.390928 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd78d8d1-8617-4c3d-9205-abd8bbdde710" path="/var/lib/kubelet/pods/cd78d8d1-8617-4c3d-9205-abd8bbdde710/volumes" Sep 30 09:53:42 crc kubenswrapper[4730]: I0930 09:53:42.417210 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xdtmn\" (UniqueName: \"kubernetes.io/projected/835e01d4-2f60-40dd-8fb7-c4ac9adcc01f-kube-api-access-xdtmn\") pod \"redhat-operators-njvdw\" (UID: \"835e01d4-2f60-40dd-8fb7-c4ac9adcc01f\") " pod="openshift-marketplace/redhat-operators-njvdw" Sep 30 09:53:42 crc kubenswrapper[4730]: I0930 09:53:42.417474 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/835e01d4-2f60-40dd-8fb7-c4ac9adcc01f-catalog-content\") pod \"redhat-operators-njvdw\" (UID: \"835e01d4-2f60-40dd-8fb7-c4ac9adcc01f\") " pod="openshift-marketplace/redhat-operators-njvdw" Sep 30 09:53:42 crc kubenswrapper[4730]: I0930 09:53:42.417688 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/835e01d4-2f60-40dd-8fb7-c4ac9adcc01f-utilities\") pod \"redhat-operators-njvdw\" (UID: \"835e01d4-2f60-40dd-8fb7-c4ac9adcc01f\") " pod="openshift-marketplace/redhat-operators-njvdw" Sep 30 09:53:42 crc kubenswrapper[4730]: I0930 09:53:42.451104 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xgf65" Sep 30 09:53:42 crc kubenswrapper[4730]: I0930 09:53:42.519358 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xdtmn\" (UniqueName: \"kubernetes.io/projected/835e01d4-2f60-40dd-8fb7-c4ac9adcc01f-kube-api-access-xdtmn\") pod \"redhat-operators-njvdw\" (UID: \"835e01d4-2f60-40dd-8fb7-c4ac9adcc01f\") " pod="openshift-marketplace/redhat-operators-njvdw" Sep 30 09:53:42 crc kubenswrapper[4730]: I0930 09:53:42.519437 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/835e01d4-2f60-40dd-8fb7-c4ac9adcc01f-catalog-content\") pod \"redhat-operators-njvdw\" (UID: \"835e01d4-2f60-40dd-8fb7-c4ac9adcc01f\") " pod="openshift-marketplace/redhat-operators-njvdw" Sep 30 09:53:42 crc kubenswrapper[4730]: I0930 09:53:42.519515 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/835e01d4-2f60-40dd-8fb7-c4ac9adcc01f-utilities\") pod \"redhat-operators-njvdw\" (UID: \"835e01d4-2f60-40dd-8fb7-c4ac9adcc01f\") " pod="openshift-marketplace/redhat-operators-njvdw" Sep 30 09:53:42 crc kubenswrapper[4730]: I0930 09:53:42.520070 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/835e01d4-2f60-40dd-8fb7-c4ac9adcc01f-utilities\") pod \"redhat-operators-njvdw\" (UID: \"835e01d4-2f60-40dd-8fb7-c4ac9adcc01f\") " pod="openshift-marketplace/redhat-operators-njvdw" Sep 30 09:53:42 crc kubenswrapper[4730]: I0930 09:53:42.520360 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/835e01d4-2f60-40dd-8fb7-c4ac9adcc01f-catalog-content\") pod \"redhat-operators-njvdw\" (UID: \"835e01d4-2f60-40dd-8fb7-c4ac9adcc01f\") " pod="openshift-marketplace/redhat-operators-njvdw" Sep 30 09:53:42 crc kubenswrapper[4730]: I0930 09:53:42.544577 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xdtmn\" (UniqueName: \"kubernetes.io/projected/835e01d4-2f60-40dd-8fb7-c4ac9adcc01f-kube-api-access-xdtmn\") pod \"redhat-operators-njvdw\" (UID: \"835e01d4-2f60-40dd-8fb7-c4ac9adcc01f\") " pod="openshift-marketplace/redhat-operators-njvdw" Sep 30 09:53:42 crc kubenswrapper[4730]: I0930 09:53:42.666388 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-njvdw" Sep 30 09:53:42 crc kubenswrapper[4730]: I0930 09:53:42.690088 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-xgf65"] Sep 30 09:53:42 crc kubenswrapper[4730]: W0930 09:53:42.694440 4730 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod01e08669_831d_492e_b8eb_302fac0ca54a.slice/crio-124575caf1976ba8b7c2efefdef671cf51d49f949e1bf173dbc4f425adbd9ac0 WatchSource:0}: Error finding container 124575caf1976ba8b7c2efefdef671cf51d49f949e1bf173dbc4f425adbd9ac0: Status 404 returned error can't find the container with id 124575caf1976ba8b7c2efefdef671cf51d49f949e1bf173dbc4f425adbd9ac0 Sep 30 09:53:43 crc kubenswrapper[4730]: I0930 09:53:43.011260 4730 generic.go:334] "Generic (PLEG): container finished" podID="01e08669-831d-492e-b8eb-302fac0ca54a" containerID="c4e69d6df38ca21d00d47d81fb0e053d55d3afbba8f845e9fbda2885e6ab3fbb" exitCode=0 Sep 30 09:53:43 crc kubenswrapper[4730]: I0930 09:53:43.011364 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xgf65" event={"ID":"01e08669-831d-492e-b8eb-302fac0ca54a","Type":"ContainerDied","Data":"c4e69d6df38ca21d00d47d81fb0e053d55d3afbba8f845e9fbda2885e6ab3fbb"} Sep 30 09:53:43 crc kubenswrapper[4730]: I0930 09:53:43.011667 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xgf65" event={"ID":"01e08669-831d-492e-b8eb-302fac0ca54a","Type":"ContainerStarted","Data":"124575caf1976ba8b7c2efefdef671cf51d49f949e1bf173dbc4f425adbd9ac0"} Sep 30 09:53:43 crc kubenswrapper[4730]: I0930 09:53:43.066857 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-njvdw"] Sep 30 09:53:43 crc kubenswrapper[4730]: W0930 09:53:43.073088 4730 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod835e01d4_2f60_40dd_8fb7_c4ac9adcc01f.slice/crio-6281f0cc404cdb8b9b14bbe62b3b92f3b7889d1e6668019cf613575460d9ae04 WatchSource:0}: Error finding container 6281f0cc404cdb8b9b14bbe62b3b92f3b7889d1e6668019cf613575460d9ae04: Status 404 returned error can't find the container with id 6281f0cc404cdb8b9b14bbe62b3b92f3b7889d1e6668019cf613575460d9ae04 Sep 30 09:53:44 crc kubenswrapper[4730]: I0930 09:53:44.019331 4730 generic.go:334] "Generic (PLEG): container finished" podID="835e01d4-2f60-40dd-8fb7-c4ac9adcc01f" containerID="6df706cd8ae6164f348a9a798e17b92240c4f8ea4a04ad92b0e61e7dcc0dc95d" exitCode=0 Sep 30 09:53:44 crc kubenswrapper[4730]: I0930 09:53:44.019417 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-njvdw" event={"ID":"835e01d4-2f60-40dd-8fb7-c4ac9adcc01f","Type":"ContainerDied","Data":"6df706cd8ae6164f348a9a798e17b92240c4f8ea4a04ad92b0e61e7dcc0dc95d"} Sep 30 09:53:44 crc kubenswrapper[4730]: I0930 09:53:44.019779 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-njvdw" event={"ID":"835e01d4-2f60-40dd-8fb7-c4ac9adcc01f","Type":"ContainerStarted","Data":"6281f0cc404cdb8b9b14bbe62b3b92f3b7889d1e6668019cf613575460d9ae04"} Sep 30 09:53:44 crc kubenswrapper[4730]: I0930 09:53:44.021961 4730 generic.go:334] "Generic (PLEG): container finished" podID="01e08669-831d-492e-b8eb-302fac0ca54a" containerID="a926aa007275aaf05777adcdaf3a55727cd79fa172fadc723f1f0c970e5fa2bd" exitCode=0 Sep 30 
09:53:44 crc kubenswrapper[4730]: I0930 09:53:44.022105 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xgf65" event={"ID":"01e08669-831d-492e-b8eb-302fac0ca54a","Type":"ContainerDied","Data":"a926aa007275aaf05777adcdaf3a55727cd79fa172fadc723f1f0c970e5fa2bd"} Sep 30 09:53:44 crc kubenswrapper[4730]: I0930 09:53:44.536312 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-jr5df"] Sep 30 09:53:44 crc kubenswrapper[4730]: I0930 09:53:44.537657 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-jr5df" Sep 30 09:53:44 crc kubenswrapper[4730]: I0930 09:53:44.539992 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Sep 30 09:53:44 crc kubenswrapper[4730]: I0930 09:53:44.544765 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-jr5df"] Sep 30 09:53:44 crc kubenswrapper[4730]: I0930 09:53:44.646471 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1b5268d8-ebc0-4df3-8be8-f0ed2ec66431-catalog-content\") pod \"certified-operators-jr5df\" (UID: \"1b5268d8-ebc0-4df3-8be8-f0ed2ec66431\") " pod="openshift-marketplace/certified-operators-jr5df" Sep 30 09:53:44 crc kubenswrapper[4730]: I0930 09:53:44.646561 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jmxst\" (UniqueName: \"kubernetes.io/projected/1b5268d8-ebc0-4df3-8be8-f0ed2ec66431-kube-api-access-jmxst\") pod \"certified-operators-jr5df\" (UID: \"1b5268d8-ebc0-4df3-8be8-f0ed2ec66431\") " pod="openshift-marketplace/certified-operators-jr5df" Sep 30 09:53:44 crc kubenswrapper[4730]: I0930 09:53:44.646638 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1b5268d8-ebc0-4df3-8be8-f0ed2ec66431-utilities\") pod \"certified-operators-jr5df\" (UID: \"1b5268d8-ebc0-4df3-8be8-f0ed2ec66431\") " pod="openshift-marketplace/certified-operators-jr5df" Sep 30 09:53:44 crc kubenswrapper[4730]: I0930 09:53:44.732878 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-b9wqt"] Sep 30 09:53:44 crc kubenswrapper[4730]: I0930 09:53:44.733910 4730 util.go:30] "No sandbox for pod can be found. 
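
The "Generic (PLEG): container finished" lines and the ContainerDied/ContainerStarted pairs come from the pod lifecycle event generator: it periodically relists containers from CRI-O, diffs the result against its previous snapshot, and feeds the deltas to the sync loop. Here the dying containers are the catalog pods' extract-utilities and extract-content steps exiting 0. The W-level "Failed to process watch event ... 404" entries appear, as far as this log shows, to be a harmless race in which cAdvisor sees a new cgroup before the container is inspectable. A minimal relist diff in Go, with a hypothetical id-to-running snapshot in place of the real runtime cache:

```go
package main

import "fmt"

// relist diffs two snapshots of container state (id -> running) and
// emits PLEG-style events of the kind the sync loop consumes above.
func relist(old, cur map[string]bool) []string {
	var events []string
	for id, running := range cur {
		was, known := old[id]
		switch {
		case !known && running:
			events = append(events, "ContainerStarted "+id)
		case known && was && !running:
			events = append(events, "ContainerDied "+id) // "container finished"; exit code read separately
		}
	}
	return events
}

func main() {
	old := map[string]bool{"sandbox": true, "extract-utilities": true}
	cur := map[string]bool{"sandbox": true, "extract-utilities": false, "extract-content": true}
	for _, e := range relist(old, cur) {
		fmt.Println(e)
	}
}
```
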
Need to start a new one" pod="openshift-marketplace/community-operators-b9wqt" Sep 30 09:53:44 crc kubenswrapper[4730]: I0930 09:53:44.736937 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Sep 30 09:53:44 crc kubenswrapper[4730]: I0930 09:53:44.747939 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1b5268d8-ebc0-4df3-8be8-f0ed2ec66431-catalog-content\") pod \"certified-operators-jr5df\" (UID: \"1b5268d8-ebc0-4df3-8be8-f0ed2ec66431\") " pod="openshift-marketplace/certified-operators-jr5df" Sep 30 09:53:44 crc kubenswrapper[4730]: I0930 09:53:44.748046 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jmxst\" (UniqueName: \"kubernetes.io/projected/1b5268d8-ebc0-4df3-8be8-f0ed2ec66431-kube-api-access-jmxst\") pod \"certified-operators-jr5df\" (UID: \"1b5268d8-ebc0-4df3-8be8-f0ed2ec66431\") " pod="openshift-marketplace/certified-operators-jr5df" Sep 30 09:53:44 crc kubenswrapper[4730]: I0930 09:53:44.748124 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1b5268d8-ebc0-4df3-8be8-f0ed2ec66431-utilities\") pod \"certified-operators-jr5df\" (UID: \"1b5268d8-ebc0-4df3-8be8-f0ed2ec66431\") " pod="openshift-marketplace/certified-operators-jr5df" Sep 30 09:53:44 crc kubenswrapper[4730]: I0930 09:53:44.748727 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1b5268d8-ebc0-4df3-8be8-f0ed2ec66431-utilities\") pod \"certified-operators-jr5df\" (UID: \"1b5268d8-ebc0-4df3-8be8-f0ed2ec66431\") " pod="openshift-marketplace/certified-operators-jr5df" Sep 30 09:53:44 crc kubenswrapper[4730]: I0930 09:53:44.749024 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1b5268d8-ebc0-4df3-8be8-f0ed2ec66431-catalog-content\") pod \"certified-operators-jr5df\" (UID: \"1b5268d8-ebc0-4df3-8be8-f0ed2ec66431\") " pod="openshift-marketplace/certified-operators-jr5df" Sep 30 09:53:44 crc kubenswrapper[4730]: I0930 09:53:44.754277 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-b9wqt"] Sep 30 09:53:44 crc kubenswrapper[4730]: I0930 09:53:44.809629 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jmxst\" (UniqueName: \"kubernetes.io/projected/1b5268d8-ebc0-4df3-8be8-f0ed2ec66431-kube-api-access-jmxst\") pod \"certified-operators-jr5df\" (UID: \"1b5268d8-ebc0-4df3-8be8-f0ed2ec66431\") " pod="openshift-marketplace/certified-operators-jr5df" Sep 30 09:53:44 crc kubenswrapper[4730]: I0930 09:53:44.850284 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b0b4345a-451e-4895-be55-f2ce12708fa4-utilities\") pod \"community-operators-b9wqt\" (UID: \"b0b4345a-451e-4895-be55-f2ce12708fa4\") " pod="openshift-marketplace/community-operators-b9wqt" Sep 30 09:53:44 crc kubenswrapper[4730]: I0930 09:53:44.850432 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b0b4345a-451e-4895-be55-f2ce12708fa4-catalog-content\") pod \"community-operators-b9wqt\" (UID: 
\"b0b4345a-451e-4895-be55-f2ce12708fa4\") " pod="openshift-marketplace/community-operators-b9wqt" Sep 30 09:53:44 crc kubenswrapper[4730]: I0930 09:53:44.850456 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rclm8\" (UniqueName: \"kubernetes.io/projected/b0b4345a-451e-4895-be55-f2ce12708fa4-kube-api-access-rclm8\") pod \"community-operators-b9wqt\" (UID: \"b0b4345a-451e-4895-be55-f2ce12708fa4\") " pod="openshift-marketplace/community-operators-b9wqt" Sep 30 09:53:44 crc kubenswrapper[4730]: I0930 09:53:44.885065 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-jr5df" Sep 30 09:53:44 crc kubenswrapper[4730]: I0930 09:53:44.954280 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b0b4345a-451e-4895-be55-f2ce12708fa4-catalog-content\") pod \"community-operators-b9wqt\" (UID: \"b0b4345a-451e-4895-be55-f2ce12708fa4\") " pod="openshift-marketplace/community-operators-b9wqt" Sep 30 09:53:44 crc kubenswrapper[4730]: I0930 09:53:44.954484 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rclm8\" (UniqueName: \"kubernetes.io/projected/b0b4345a-451e-4895-be55-f2ce12708fa4-kube-api-access-rclm8\") pod \"community-operators-b9wqt\" (UID: \"b0b4345a-451e-4895-be55-f2ce12708fa4\") " pod="openshift-marketplace/community-operators-b9wqt" Sep 30 09:53:44 crc kubenswrapper[4730]: I0930 09:53:44.954545 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b0b4345a-451e-4895-be55-f2ce12708fa4-utilities\") pod \"community-operators-b9wqt\" (UID: \"b0b4345a-451e-4895-be55-f2ce12708fa4\") " pod="openshift-marketplace/community-operators-b9wqt" Sep 30 09:53:44 crc kubenswrapper[4730]: I0930 09:53:44.955031 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b0b4345a-451e-4895-be55-f2ce12708fa4-utilities\") pod \"community-operators-b9wqt\" (UID: \"b0b4345a-451e-4895-be55-f2ce12708fa4\") " pod="openshift-marketplace/community-operators-b9wqt" Sep 30 09:53:44 crc kubenswrapper[4730]: I0930 09:53:44.955237 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b0b4345a-451e-4895-be55-f2ce12708fa4-catalog-content\") pod \"community-operators-b9wqt\" (UID: \"b0b4345a-451e-4895-be55-f2ce12708fa4\") " pod="openshift-marketplace/community-operators-b9wqt" Sep 30 09:53:44 crc kubenswrapper[4730]: I0930 09:53:44.975149 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rclm8\" (UniqueName: \"kubernetes.io/projected/b0b4345a-451e-4895-be55-f2ce12708fa4-kube-api-access-rclm8\") pod \"community-operators-b9wqt\" (UID: \"b0b4345a-451e-4895-be55-f2ce12708fa4\") " pod="openshift-marketplace/community-operators-b9wqt" Sep 30 09:53:45 crc kubenswrapper[4730]: I0930 09:53:45.066101 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xgf65" event={"ID":"01e08669-831d-492e-b8eb-302fac0ca54a","Type":"ContainerStarted","Data":"6d5e14d462d6805f187a3efee1a769baa9e7a117c4fab7d5423440714555a3b2"} Sep 30 09:53:45 crc kubenswrapper[4730]: I0930 09:53:45.083580 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openshift-marketplace/certified-operators-jr5df"] Sep 30 09:53:45 crc kubenswrapper[4730]: I0930 09:53:45.129240 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-b9wqt" Sep 30 09:53:45 crc kubenswrapper[4730]: I0930 09:53:45.537296 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-xgf65" podStartSLOduration=2.058197243 podStartE2EDuration="3.537276426s" podCreationTimestamp="2025-09-30 09:53:42 +0000 UTC" firstStartedPulling="2025-09-30 09:53:43.012991852 +0000 UTC m=+267.346251845" lastFinishedPulling="2025-09-30 09:53:44.492071035 +0000 UTC m=+268.825331028" observedRunningTime="2025-09-30 09:53:45.088647319 +0000 UTC m=+269.421907322" watchObservedRunningTime="2025-09-30 09:53:45.537276426 +0000 UTC m=+269.870536419" Sep 30 09:53:45 crc kubenswrapper[4730]: I0930 09:53:45.538866 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-b9wqt"] Sep 30 09:53:45 crc kubenswrapper[4730]: W0930 09:53:45.557980 4730 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb0b4345a_451e_4895_be55_f2ce12708fa4.slice/crio-20d3b21c77481b65ddc944bfdf11401044c12a35e3304418eecf01db7ac016df WatchSource:0}: Error finding container 20d3b21c77481b65ddc944bfdf11401044c12a35e3304418eecf01db7ac016df: Status 404 returned error can't find the container with id 20d3b21c77481b65ddc944bfdf11401044c12a35e3304418eecf01db7ac016df Sep 30 09:53:46 crc kubenswrapper[4730]: I0930 09:53:46.074979 4730 generic.go:334] "Generic (PLEG): container finished" podID="1b5268d8-ebc0-4df3-8be8-f0ed2ec66431" containerID="474ea612077c4839713c0596b54c15e12b3025be1808e620255c5ffecb9a1657" exitCode=0 Sep 30 09:53:46 crc kubenswrapper[4730]: I0930 09:53:46.075057 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jr5df" event={"ID":"1b5268d8-ebc0-4df3-8be8-f0ed2ec66431","Type":"ContainerDied","Data":"474ea612077c4839713c0596b54c15e12b3025be1808e620255c5ffecb9a1657"} Sep 30 09:53:46 crc kubenswrapper[4730]: I0930 09:53:46.075086 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jr5df" event={"ID":"1b5268d8-ebc0-4df3-8be8-f0ed2ec66431","Type":"ContainerStarted","Data":"074622f8b918635cb3f6e90107962e235fe2fe3efab5e980ec02ff88e5b2fc5e"} Sep 30 09:53:46 crc kubenswrapper[4730]: I0930 09:53:46.078097 4730 generic.go:334] "Generic (PLEG): container finished" podID="835e01d4-2f60-40dd-8fb7-c4ac9adcc01f" containerID="7423b60c765ea89586fcc941bea89f7ed1f17a74cf3c3c95bba2b4c0068eb0cb" exitCode=0 Sep 30 09:53:46 crc kubenswrapper[4730]: I0930 09:53:46.078185 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-njvdw" event={"ID":"835e01d4-2f60-40dd-8fb7-c4ac9adcc01f","Type":"ContainerDied","Data":"7423b60c765ea89586fcc941bea89f7ed1f17a74cf3c3c95bba2b4c0068eb0cb"} Sep 30 09:53:46 crc kubenswrapper[4730]: I0930 09:53:46.080499 4730 generic.go:334] "Generic (PLEG): container finished" podID="b0b4345a-451e-4895-be55-f2ce12708fa4" containerID="0626e6f4f0f26a4b1cbb8e66e54855d023cbd681334bdc7614c255c69aefe63a" exitCode=0 Sep 30 09:53:46 crc kubenswrapper[4730]: I0930 09:53:46.080593 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-b9wqt" 
event={"ID":"b0b4345a-451e-4895-be55-f2ce12708fa4","Type":"ContainerDied","Data":"0626e6f4f0f26a4b1cbb8e66e54855d023cbd681334bdc7614c255c69aefe63a"} Sep 30 09:53:46 crc kubenswrapper[4730]: I0930 09:53:46.080658 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-b9wqt" event={"ID":"b0b4345a-451e-4895-be55-f2ce12708fa4","Type":"ContainerStarted","Data":"20d3b21c77481b65ddc944bfdf11401044c12a35e3304418eecf01db7ac016df"} Sep 30 09:53:47 crc kubenswrapper[4730]: I0930 09:53:47.088941 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-njvdw" event={"ID":"835e01d4-2f60-40dd-8fb7-c4ac9adcc01f","Type":"ContainerStarted","Data":"778483313a3d26dd28acea420f08291c067043e405901167d45ec0ca30e33aa3"} Sep 30 09:53:47 crc kubenswrapper[4730]: I0930 09:53:47.090881 4730 generic.go:334] "Generic (PLEG): container finished" podID="1b5268d8-ebc0-4df3-8be8-f0ed2ec66431" containerID="e78b976a73a1c907cfad9f7c6ada91b1ec8ae5cfee49439908b7ff86d3a26289" exitCode=0 Sep 30 09:53:47 crc kubenswrapper[4730]: I0930 09:53:47.090920 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jr5df" event={"ID":"1b5268d8-ebc0-4df3-8be8-f0ed2ec66431","Type":"ContainerDied","Data":"e78b976a73a1c907cfad9f7c6ada91b1ec8ae5cfee49439908b7ff86d3a26289"} Sep 30 09:53:47 crc kubenswrapper[4730]: I0930 09:53:47.107012 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-njvdw" podStartSLOduration=2.55775208 podStartE2EDuration="5.106993072s" podCreationTimestamp="2025-09-30 09:53:42 +0000 UTC" firstStartedPulling="2025-09-30 09:53:44.021935463 +0000 UTC m=+268.355195456" lastFinishedPulling="2025-09-30 09:53:46.571176465 +0000 UTC m=+270.904436448" observedRunningTime="2025-09-30 09:53:47.104705147 +0000 UTC m=+271.437965150" watchObservedRunningTime="2025-09-30 09:53:47.106993072 +0000 UTC m=+271.440253075" Sep 30 09:53:48 crc kubenswrapper[4730]: I0930 09:53:48.097903 4730 generic.go:334] "Generic (PLEG): container finished" podID="b0b4345a-451e-4895-be55-f2ce12708fa4" containerID="7be45785295aa2ad6926d97c8086dbde2c1081913c8bf0b97700d83c457dc33f" exitCode=0 Sep 30 09:53:48 crc kubenswrapper[4730]: I0930 09:53:48.097960 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-b9wqt" event={"ID":"b0b4345a-451e-4895-be55-f2ce12708fa4","Type":"ContainerDied","Data":"7be45785295aa2ad6926d97c8086dbde2c1081913c8bf0b97700d83c457dc33f"} Sep 30 09:53:49 crc kubenswrapper[4730]: I0930 09:53:49.117158 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-b9wqt" event={"ID":"b0b4345a-451e-4895-be55-f2ce12708fa4","Type":"ContainerStarted","Data":"859899378ef59f503497c6ca5b83b5896dcd66326889f2c8a57d4b7df5ea9a44"} Sep 30 09:53:49 crc kubenswrapper[4730]: I0930 09:53:49.120327 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jr5df" event={"ID":"1b5268d8-ebc0-4df3-8be8-f0ed2ec66431","Type":"ContainerStarted","Data":"4a228e88b8dec09568b673ea4953b23b8547bb53e34c39e88a01451d16c00952"} Sep 30 09:53:49 crc kubenswrapper[4730]: I0930 09:53:49.152139 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-b9wqt" podStartSLOduration=2.667327809 podStartE2EDuration="5.152118436s" podCreationTimestamp="2025-09-30 09:53:44 +0000 UTC" 
firstStartedPulling="2025-09-30 09:53:46.090024608 +0000 UTC m=+270.423284601" lastFinishedPulling="2025-09-30 09:53:48.574815235 +0000 UTC m=+272.908075228" observedRunningTime="2025-09-30 09:53:49.149256488 +0000 UTC m=+273.482516491" watchObservedRunningTime="2025-09-30 09:53:49.152118436 +0000 UTC m=+273.485378429" Sep 30 09:53:49 crc kubenswrapper[4730]: I0930 09:53:49.170300 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-jr5df" podStartSLOduration=3.606950108 podStartE2EDuration="5.170281402s" podCreationTimestamp="2025-09-30 09:53:44 +0000 UTC" firstStartedPulling="2025-09-30 09:53:46.077206591 +0000 UTC m=+270.410466594" lastFinishedPulling="2025-09-30 09:53:47.640537895 +0000 UTC m=+271.973797888" observedRunningTime="2025-09-30 09:53:49.167764542 +0000 UTC m=+273.501024545" watchObservedRunningTime="2025-09-30 09:53:49.170281402 +0000 UTC m=+273.503541395" Sep 30 09:53:52 crc kubenswrapper[4730]: I0930 09:53:52.452600 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-xgf65" Sep 30 09:53:52 crc kubenswrapper[4730]: I0930 09:53:52.453564 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-xgf65" Sep 30 09:53:52 crc kubenswrapper[4730]: I0930 09:53:52.505127 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-xgf65" Sep 30 09:53:52 crc kubenswrapper[4730]: I0930 09:53:52.666576 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-njvdw" Sep 30 09:53:52 crc kubenswrapper[4730]: I0930 09:53:52.667352 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-njvdw" Sep 30 09:53:52 crc kubenswrapper[4730]: I0930 09:53:52.708255 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-njvdw" Sep 30 09:53:53 crc kubenswrapper[4730]: I0930 09:53:53.183022 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-njvdw" Sep 30 09:53:53 crc kubenswrapper[4730]: I0930 09:53:53.188294 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-xgf65" Sep 30 09:53:54 crc kubenswrapper[4730]: I0930 09:53:54.886103 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-jr5df" Sep 30 09:53:54 crc kubenswrapper[4730]: I0930 09:53:54.886409 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-jr5df" Sep 30 09:53:54 crc kubenswrapper[4730]: I0930 09:53:54.927773 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-jr5df" Sep 30 09:53:55 crc kubenswrapper[4730]: I0930 09:53:55.129741 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-b9wqt" Sep 30 09:53:55 crc kubenswrapper[4730]: I0930 09:53:55.129804 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-b9wqt" Sep 30 09:53:55 crc kubenswrapper[4730]: I0930 09:53:55.171702 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openshift-marketplace/community-operators-b9wqt" Sep 30 09:53:55 crc kubenswrapper[4730]: I0930 09:53:55.195236 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-jr5df" Sep 30 09:53:55 crc kubenswrapper[4730]: I0930 09:53:55.219340 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-b9wqt" Sep 30 09:55:02 crc kubenswrapper[4730]: I0930 09:55:02.336945 4730 patch_prober.go:28] interesting pod/machine-config-daemon-d4zf9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 09:55:02 crc kubenswrapper[4730]: I0930 09:55:02.337357 4730 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 09:55:32 crc kubenswrapper[4730]: I0930 09:55:32.336291 4730 patch_prober.go:28] interesting pod/machine-config-daemon-d4zf9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 09:55:32 crc kubenswrapper[4730]: I0930 09:55:32.336728 4730 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 09:56:02 crc kubenswrapper[4730]: I0930 09:56:02.337291 4730 patch_prober.go:28] interesting pod/machine-config-daemon-d4zf9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 09:56:02 crc kubenswrapper[4730]: I0930 09:56:02.339414 4730 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 09:56:02 crc kubenswrapper[4730]: I0930 09:56:02.339597 4730 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" Sep 30 09:56:02 crc kubenswrapper[4730]: I0930 09:56:02.340562 4730 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"329b3ada6780543f63d35d6db2f1dc9bd16e3f7ca3e03a686699b3e9535f2065"} pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 09:56:02 crc kubenswrapper[4730]: I0930 09:56:02.340835 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" 
podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerName="machine-config-daemon" containerID="cri-o://329b3ada6780543f63d35d6db2f1dc9bd16e3f7ca3e03a686699b3e9535f2065" gracePeriod=600 Sep 30 09:56:02 crc kubenswrapper[4730]: I0930 09:56:02.921849 4730 generic.go:334] "Generic (PLEG): container finished" podID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerID="329b3ada6780543f63d35d6db2f1dc9bd16e3f7ca3e03a686699b3e9535f2065" exitCode=0 Sep 30 09:56:02 crc kubenswrapper[4730]: I0930 09:56:02.921911 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" event={"ID":"95bd4436-8399-478d-9552-c9ba5ae8f327","Type":"ContainerDied","Data":"329b3ada6780543f63d35d6db2f1dc9bd16e3f7ca3e03a686699b3e9535f2065"} Sep 30 09:56:02 crc kubenswrapper[4730]: I0930 09:56:02.922171 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" event={"ID":"95bd4436-8399-478d-9552-c9ba5ae8f327","Type":"ContainerStarted","Data":"dafc5f81114ac6c819cc935ab6256c7275b10fad50ffbd38ad7d62bed43ffa86"} Sep 30 09:56:02 crc kubenswrapper[4730]: I0930 09:56:02.922206 4730 scope.go:117] "RemoveContainer" containerID="defac85c6bf34a3ea262b1ad293516b72b27c3f3e328f3f970694bb978bf90a6" Sep 30 09:56:06 crc kubenswrapper[4730]: I0930 09:56:06.366243 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-fpvfq"] Sep 30 09:56:06 crc kubenswrapper[4730]: I0930 09:56:06.368548 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-fpvfq" Sep 30 09:56:06 crc kubenswrapper[4730]: I0930 09:56:06.378466 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-fpvfq"] Sep 30 09:56:06 crc kubenswrapper[4730]: I0930 09:56:06.522148 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/29f4ff15-a72d-4688-8a43-fb4731a72e6b-registry-tls\") pod \"image-registry-66df7c8f76-fpvfq\" (UID: \"29f4ff15-a72d-4688-8a43-fb4731a72e6b\") " pod="openshift-image-registry/image-registry-66df7c8f76-fpvfq" Sep 30 09:56:06 crc kubenswrapper[4730]: I0930 09:56:06.522469 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/29f4ff15-a72d-4688-8a43-fb4731a72e6b-registry-certificates\") pod \"image-registry-66df7c8f76-fpvfq\" (UID: \"29f4ff15-a72d-4688-8a43-fb4731a72e6b\") " pod="openshift-image-registry/image-registry-66df7c8f76-fpvfq" Sep 30 09:56:06 crc kubenswrapper[4730]: I0930 09:56:06.522570 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g5qgs\" (UniqueName: \"kubernetes.io/projected/29f4ff15-a72d-4688-8a43-fb4731a72e6b-kube-api-access-g5qgs\") pod \"image-registry-66df7c8f76-fpvfq\" (UID: \"29f4ff15-a72d-4688-8a43-fb4731a72e6b\") " pod="openshift-image-registry/image-registry-66df7c8f76-fpvfq" Sep 30 09:56:06 crc kubenswrapper[4730]: I0930 09:56:06.522696 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/29f4ff15-a72d-4688-8a43-fb4731a72e6b-installation-pull-secrets\") pod \"image-registry-66df7c8f76-fpvfq\" (UID: \"29f4ff15-a72d-4688-8a43-fb4731a72e6b\") " 
pod="openshift-image-registry/image-registry-66df7c8f76-fpvfq" Sep 30 09:56:06 crc kubenswrapper[4730]: I0930 09:56:06.522841 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/29f4ff15-a72d-4688-8a43-fb4731a72e6b-bound-sa-token\") pod \"image-registry-66df7c8f76-fpvfq\" (UID: \"29f4ff15-a72d-4688-8a43-fb4731a72e6b\") " pod="openshift-image-registry/image-registry-66df7c8f76-fpvfq" Sep 30 09:56:06 crc kubenswrapper[4730]: I0930 09:56:06.522972 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/29f4ff15-a72d-4688-8a43-fb4731a72e6b-trusted-ca\") pod \"image-registry-66df7c8f76-fpvfq\" (UID: \"29f4ff15-a72d-4688-8a43-fb4731a72e6b\") " pod="openshift-image-registry/image-registry-66df7c8f76-fpvfq" Sep 30 09:56:06 crc kubenswrapper[4730]: I0930 09:56:06.523112 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-fpvfq\" (UID: \"29f4ff15-a72d-4688-8a43-fb4731a72e6b\") " pod="openshift-image-registry/image-registry-66df7c8f76-fpvfq" Sep 30 09:56:06 crc kubenswrapper[4730]: I0930 09:56:06.523738 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/29f4ff15-a72d-4688-8a43-fb4731a72e6b-ca-trust-extracted\") pod \"image-registry-66df7c8f76-fpvfq\" (UID: \"29f4ff15-a72d-4688-8a43-fb4731a72e6b\") " pod="openshift-image-registry/image-registry-66df7c8f76-fpvfq" Sep 30 09:56:06 crc kubenswrapper[4730]: I0930 09:56:06.545167 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-fpvfq\" (UID: \"29f4ff15-a72d-4688-8a43-fb4731a72e6b\") " pod="openshift-image-registry/image-registry-66df7c8f76-fpvfq" Sep 30 09:56:06 crc kubenswrapper[4730]: I0930 09:56:06.625336 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/29f4ff15-a72d-4688-8a43-fb4731a72e6b-installation-pull-secrets\") pod \"image-registry-66df7c8f76-fpvfq\" (UID: \"29f4ff15-a72d-4688-8a43-fb4731a72e6b\") " pod="openshift-image-registry/image-registry-66df7c8f76-fpvfq" Sep 30 09:56:06 crc kubenswrapper[4730]: I0930 09:56:06.625403 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/29f4ff15-a72d-4688-8a43-fb4731a72e6b-bound-sa-token\") pod \"image-registry-66df7c8f76-fpvfq\" (UID: \"29f4ff15-a72d-4688-8a43-fb4731a72e6b\") " pod="openshift-image-registry/image-registry-66df7c8f76-fpvfq" Sep 30 09:56:06 crc kubenswrapper[4730]: I0930 09:56:06.625424 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/29f4ff15-a72d-4688-8a43-fb4731a72e6b-trusted-ca\") pod \"image-registry-66df7c8f76-fpvfq\" (UID: \"29f4ff15-a72d-4688-8a43-fb4731a72e6b\") " pod="openshift-image-registry/image-registry-66df7c8f76-fpvfq" Sep 30 09:56:06 crc kubenswrapper[4730]: I0930 09:56:06.625456 4730 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/29f4ff15-a72d-4688-8a43-fb4731a72e6b-ca-trust-extracted\") pod \"image-registry-66df7c8f76-fpvfq\" (UID: \"29f4ff15-a72d-4688-8a43-fb4731a72e6b\") " pod="openshift-image-registry/image-registry-66df7c8f76-fpvfq" Sep 30 09:56:06 crc kubenswrapper[4730]: I0930 09:56:06.625493 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/29f4ff15-a72d-4688-8a43-fb4731a72e6b-registry-tls\") pod \"image-registry-66df7c8f76-fpvfq\" (UID: \"29f4ff15-a72d-4688-8a43-fb4731a72e6b\") " pod="openshift-image-registry/image-registry-66df7c8f76-fpvfq" Sep 30 09:56:06 crc kubenswrapper[4730]: I0930 09:56:06.625510 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/29f4ff15-a72d-4688-8a43-fb4731a72e6b-registry-certificates\") pod \"image-registry-66df7c8f76-fpvfq\" (UID: \"29f4ff15-a72d-4688-8a43-fb4731a72e6b\") " pod="openshift-image-registry/image-registry-66df7c8f76-fpvfq" Sep 30 09:56:06 crc kubenswrapper[4730]: I0930 09:56:06.625527 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g5qgs\" (UniqueName: \"kubernetes.io/projected/29f4ff15-a72d-4688-8a43-fb4731a72e6b-kube-api-access-g5qgs\") pod \"image-registry-66df7c8f76-fpvfq\" (UID: \"29f4ff15-a72d-4688-8a43-fb4731a72e6b\") " pod="openshift-image-registry/image-registry-66df7c8f76-fpvfq" Sep 30 09:56:06 crc kubenswrapper[4730]: I0930 09:56:06.626300 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/29f4ff15-a72d-4688-8a43-fb4731a72e6b-ca-trust-extracted\") pod \"image-registry-66df7c8f76-fpvfq\" (UID: \"29f4ff15-a72d-4688-8a43-fb4731a72e6b\") " pod="openshift-image-registry/image-registry-66df7c8f76-fpvfq" Sep 30 09:56:06 crc kubenswrapper[4730]: I0930 09:56:06.626968 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/29f4ff15-a72d-4688-8a43-fb4731a72e6b-registry-certificates\") pod \"image-registry-66df7c8f76-fpvfq\" (UID: \"29f4ff15-a72d-4688-8a43-fb4731a72e6b\") " pod="openshift-image-registry/image-registry-66df7c8f76-fpvfq" Sep 30 09:56:06 crc kubenswrapper[4730]: I0930 09:56:06.627133 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/29f4ff15-a72d-4688-8a43-fb4731a72e6b-trusted-ca\") pod \"image-registry-66df7c8f76-fpvfq\" (UID: \"29f4ff15-a72d-4688-8a43-fb4731a72e6b\") " pod="openshift-image-registry/image-registry-66df7c8f76-fpvfq" Sep 30 09:56:06 crc kubenswrapper[4730]: I0930 09:56:06.631281 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/29f4ff15-a72d-4688-8a43-fb4731a72e6b-installation-pull-secrets\") pod \"image-registry-66df7c8f76-fpvfq\" (UID: \"29f4ff15-a72d-4688-8a43-fb4731a72e6b\") " pod="openshift-image-registry/image-registry-66df7c8f76-fpvfq" Sep 30 09:56:06 crc kubenswrapper[4730]: I0930 09:56:06.631952 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/29f4ff15-a72d-4688-8a43-fb4731a72e6b-registry-tls\") pod \"image-registry-66df7c8f76-fpvfq\" (UID: 
\"29f4ff15-a72d-4688-8a43-fb4731a72e6b\") " pod="openshift-image-registry/image-registry-66df7c8f76-fpvfq" Sep 30 09:56:06 crc kubenswrapper[4730]: I0930 09:56:06.641587 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/29f4ff15-a72d-4688-8a43-fb4731a72e6b-bound-sa-token\") pod \"image-registry-66df7c8f76-fpvfq\" (UID: \"29f4ff15-a72d-4688-8a43-fb4731a72e6b\") " pod="openshift-image-registry/image-registry-66df7c8f76-fpvfq" Sep 30 09:56:06 crc kubenswrapper[4730]: I0930 09:56:06.641918 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g5qgs\" (UniqueName: \"kubernetes.io/projected/29f4ff15-a72d-4688-8a43-fb4731a72e6b-kube-api-access-g5qgs\") pod \"image-registry-66df7c8f76-fpvfq\" (UID: \"29f4ff15-a72d-4688-8a43-fb4731a72e6b\") " pod="openshift-image-registry/image-registry-66df7c8f76-fpvfq" Sep 30 09:56:06 crc kubenswrapper[4730]: I0930 09:56:06.686598 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-fpvfq" Sep 30 09:56:06 crc kubenswrapper[4730]: I0930 09:56:06.858676 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-fpvfq"] Sep 30 09:56:06 crc kubenswrapper[4730]: I0930 09:56:06.945600 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-fpvfq" event={"ID":"29f4ff15-a72d-4688-8a43-fb4731a72e6b","Type":"ContainerStarted","Data":"d6c1de558e2287343cd7b389a28dd31c3c7336662e373386fccbcb41635f056f"} Sep 30 09:56:07 crc kubenswrapper[4730]: I0930 09:56:07.952903 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-fpvfq" event={"ID":"29f4ff15-a72d-4688-8a43-fb4731a72e6b","Type":"ContainerStarted","Data":"ea32638b9636f091ad8357fa05d82fb3a1537064615c6983578b440f9b04a1de"} Sep 30 09:56:07 crc kubenswrapper[4730]: I0930 09:56:07.953347 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-fpvfq" Sep 30 09:56:07 crc kubenswrapper[4730]: I0930 09:56:07.973802 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-fpvfq" podStartSLOduration=1.973783873 podStartE2EDuration="1.973783873s" podCreationTimestamp="2025-09-30 09:56:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 09:56:07.97140049 +0000 UTC m=+412.304660493" watchObservedRunningTime="2025-09-30 09:56:07.973783873 +0000 UTC m=+412.307043856" Sep 30 09:56:26 crc kubenswrapper[4730]: I0930 09:56:26.692006 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-fpvfq" Sep 30 09:56:26 crc kubenswrapper[4730]: I0930 09:56:26.755173 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-xdzzr"] Sep 30 09:56:51 crc kubenswrapper[4730]: I0930 09:56:51.798946 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-xdzzr" podUID="3f90d05f-7820-4af0-8894-6d63dc672f33" containerName="registry" containerID="cri-o://925a3e6dc870bbe1c06be6e0fb4bcc9955a89ce4133720edb6f2cf30b8e3874f" gracePeriod=30 Sep 30 09:56:52 crc kubenswrapper[4730]: 
Sep 30 09:56:52 crc kubenswrapper[4730]: I0930 09:56:52.180495 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rzwzh\" (UniqueName: \"kubernetes.io/projected/3f90d05f-7820-4af0-8894-6d63dc672f33-kube-api-access-rzwzh\") pod \"3f90d05f-7820-4af0-8894-6d63dc672f33\" (UID: \"3f90d05f-7820-4af0-8894-6d63dc672f33\") "
Sep 30 09:56:52 crc kubenswrapper[4730]: I0930 09:56:52.180561 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/3f90d05f-7820-4af0-8894-6d63dc672f33-bound-sa-token\") pod \"3f90d05f-7820-4af0-8894-6d63dc672f33\" (UID: \"3f90d05f-7820-4af0-8894-6d63dc672f33\") "
Sep 30 09:56:52 crc kubenswrapper[4730]: I0930 09:56:52.180652 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/3f90d05f-7820-4af0-8894-6d63dc672f33-ca-trust-extracted\") pod \"3f90d05f-7820-4af0-8894-6d63dc672f33\" (UID: \"3f90d05f-7820-4af0-8894-6d63dc672f33\") "
Sep 30 09:56:52 crc kubenswrapper[4730]: I0930 09:56:52.180809 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"3f90d05f-7820-4af0-8894-6d63dc672f33\" (UID: \"3f90d05f-7820-4af0-8894-6d63dc672f33\") "
Sep 30 09:56:52 crc kubenswrapper[4730]: I0930 09:56:52.181082 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/3f90d05f-7820-4af0-8894-6d63dc672f33-registry-certificates\") pod \"3f90d05f-7820-4af0-8894-6d63dc672f33\" (UID: \"3f90d05f-7820-4af0-8894-6d63dc672f33\") "
Sep 30 09:56:52 crc kubenswrapper[4730]: I0930 09:56:52.181174 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/3f90d05f-7820-4af0-8894-6d63dc672f33-installation-pull-secrets\") pod \"3f90d05f-7820-4af0-8894-6d63dc672f33\" (UID: \"3f90d05f-7820-4af0-8894-6d63dc672f33\") "
Sep 30 09:56:52 crc kubenswrapper[4730]: I0930 09:56:52.181222 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/3f90d05f-7820-4af0-8894-6d63dc672f33-registry-tls\") pod \"3f90d05f-7820-4af0-8894-6d63dc672f33\" (UID: \"3f90d05f-7820-4af0-8894-6d63dc672f33\") "
Sep 30 09:56:52 crc kubenswrapper[4730]: I0930 09:56:52.181298 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/3f90d05f-7820-4af0-8894-6d63dc672f33-trusted-ca\") pod \"3f90d05f-7820-4af0-8894-6d63dc672f33\" (UID: \"3f90d05f-7820-4af0-8894-6d63dc672f33\") "
Sep 30 09:56:52 crc kubenswrapper[4730]: I0930 09:56:52.182013 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3f90d05f-7820-4af0-8894-6d63dc672f33-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "3f90d05f-7820-4af0-8894-6d63dc672f33" (UID: "3f90d05f-7820-4af0-8894-6d63dc672f33"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 09:56:52 crc kubenswrapper[4730]: I0930 09:56:52.182443 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3f90d05f-7820-4af0-8894-6d63dc672f33-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "3f90d05f-7820-4af0-8894-6d63dc672f33" (UID: "3f90d05f-7820-4af0-8894-6d63dc672f33"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 09:56:52 crc kubenswrapper[4730]: I0930 09:56:52.182807 4730 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/3f90d05f-7820-4af0-8894-6d63dc672f33-registry-certificates\") on node \"crc\" DevicePath \"\""
Sep 30 09:56:52 crc kubenswrapper[4730]: I0930 09:56:52.182832 4730 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/3f90d05f-7820-4af0-8894-6d63dc672f33-trusted-ca\") on node \"crc\" DevicePath \"\""
Sep 30 09:56:52 crc kubenswrapper[4730]: I0930 09:56:52.187344 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3f90d05f-7820-4af0-8894-6d63dc672f33-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "3f90d05f-7820-4af0-8894-6d63dc672f33" (UID: "3f90d05f-7820-4af0-8894-6d63dc672f33"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 09:56:52 crc kubenswrapper[4730]: I0930 09:56:52.187505 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3f90d05f-7820-4af0-8894-6d63dc672f33-kube-api-access-rzwzh" (OuterVolumeSpecName: "kube-api-access-rzwzh") pod "3f90d05f-7820-4af0-8894-6d63dc672f33" (UID: "3f90d05f-7820-4af0-8894-6d63dc672f33"). InnerVolumeSpecName "kube-api-access-rzwzh". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 09:56:52 crc kubenswrapper[4730]: I0930 09:56:52.188424 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3f90d05f-7820-4af0-8894-6d63dc672f33-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "3f90d05f-7820-4af0-8894-6d63dc672f33" (UID: "3f90d05f-7820-4af0-8894-6d63dc672f33"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 09:56:52 crc kubenswrapper[4730]: I0930 09:56:52.188901 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3f90d05f-7820-4af0-8894-6d63dc672f33-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "3f90d05f-7820-4af0-8894-6d63dc672f33" (UID: "3f90d05f-7820-4af0-8894-6d63dc672f33"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 09:56:52 crc kubenswrapper[4730]: I0930 09:56:52.197376 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "3f90d05f-7820-4af0-8894-6d63dc672f33" (UID: "3f90d05f-7820-4af0-8894-6d63dc672f33"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue ""
Sep 30 09:56:52 crc kubenswrapper[4730]: I0930 09:56:52.203106 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3f90d05f-7820-4af0-8894-6d63dc672f33-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "3f90d05f-7820-4af0-8894-6d63dc672f33" (UID: "3f90d05f-7820-4af0-8894-6d63dc672f33"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 09:56:52 crc kubenswrapper[4730]: I0930 09:56:52.217812 4730 generic.go:334] "Generic (PLEG): container finished" podID="3f90d05f-7820-4af0-8894-6d63dc672f33" containerID="925a3e6dc870bbe1c06be6e0fb4bcc9955a89ce4133720edb6f2cf30b8e3874f" exitCode=0
Sep 30 09:56:52 crc kubenswrapper[4730]: I0930 09:56:52.217880 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-xdzzr" event={"ID":"3f90d05f-7820-4af0-8894-6d63dc672f33","Type":"ContainerDied","Data":"925a3e6dc870bbe1c06be6e0fb4bcc9955a89ce4133720edb6f2cf30b8e3874f"}
Sep 30 09:56:52 crc kubenswrapper[4730]: I0930 09:56:52.217919 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-xdzzr" event={"ID":"3f90d05f-7820-4af0-8894-6d63dc672f33","Type":"ContainerDied","Data":"b40a39b2fd2392d4d71a6c43cd8556afbdc15e514538736631c8aef71d0ca341"}
Sep 30 09:56:52 crc kubenswrapper[4730]: I0930 09:56:52.217941 4730 scope.go:117] "RemoveContainer" containerID="925a3e6dc870bbe1c06be6e0fb4bcc9955a89ce4133720edb6f2cf30b8e3874f"
Sep 30 09:56:52 crc kubenswrapper[4730]: I0930 09:56:52.218112 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-xdzzr"
Sep 30 09:56:52 crc kubenswrapper[4730]: I0930 09:56:52.235136 4730 scope.go:117] "RemoveContainer" containerID="925a3e6dc870bbe1c06be6e0fb4bcc9955a89ce4133720edb6f2cf30b8e3874f"
Sep 30 09:56:52 crc kubenswrapper[4730]: E0930 09:56:52.235643 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"925a3e6dc870bbe1c06be6e0fb4bcc9955a89ce4133720edb6f2cf30b8e3874f\": container with ID starting with 925a3e6dc870bbe1c06be6e0fb4bcc9955a89ce4133720edb6f2cf30b8e3874f not found: ID does not exist" containerID="925a3e6dc870bbe1c06be6e0fb4bcc9955a89ce4133720edb6f2cf30b8e3874f"
Sep 30 09:56:52 crc kubenswrapper[4730]: I0930 09:56:52.235737 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"925a3e6dc870bbe1c06be6e0fb4bcc9955a89ce4133720edb6f2cf30b8e3874f"} err="failed to get container status \"925a3e6dc870bbe1c06be6e0fb4bcc9955a89ce4133720edb6f2cf30b8e3874f\": rpc error: code = NotFound desc = could not find container \"925a3e6dc870bbe1c06be6e0fb4bcc9955a89ce4133720edb6f2cf30b8e3874f\": container with ID starting with 925a3e6dc870bbe1c06be6e0fb4bcc9955a89ce4133720edb6f2cf30b8e3874f not found: ID does not exist"
Sep 30 09:56:52 crc kubenswrapper[4730]: I0930 09:56:52.258650 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-xdzzr"]
Sep 30 09:56:52 crc kubenswrapper[4730]: I0930 09:56:52.264040 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-xdzzr"]
Sep 30 09:56:52 crc kubenswrapper[4730]: I0930 09:56:52.284538 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rzwzh\" (UniqueName: \"kubernetes.io/projected/3f90d05f-7820-4af0-8894-6d63dc672f33-kube-api-access-rzwzh\") on node \"crc\" DevicePath \"\""
Sep 30 09:56:52 crc kubenswrapper[4730]: I0930 09:56:52.284583 4730 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/3f90d05f-7820-4af0-8894-6d63dc672f33-bound-sa-token\") on node \"crc\" DevicePath \"\""
Sep 30 09:56:52 crc kubenswrapper[4730]: I0930 09:56:52.284597 4730 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/3f90d05f-7820-4af0-8894-6d63dc672f33-ca-trust-extracted\") on node \"crc\" DevicePath \"\""
Sep 30 09:56:52 crc kubenswrapper[4730]: I0930 09:56:52.284656 4730 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/3f90d05f-7820-4af0-8894-6d63dc672f33-installation-pull-secrets\") on node \"crc\" DevicePath \"\""
Sep 30 09:56:52 crc kubenswrapper[4730]: I0930 09:56:52.284670 4730 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/3f90d05f-7820-4af0-8894-6d63dc672f33-registry-tls\") on node \"crc\" DevicePath \"\""
Sep 30 09:56:52 crc kubenswrapper[4730]: I0930 09:56:52.389445 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3f90d05f-7820-4af0-8894-6d63dc672f33" path="/var/lib/kubelet/pods/3f90d05f-7820-4af0-8894-6d63dc672f33/volumes"
Sep 30 09:58:02 crc kubenswrapper[4730]: I0930 09:58:02.336601 4730 patch_prober.go:28] interesting pod/machine-config-daemon-d4zf9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 30 09:58:02 crc kubenswrapper[4730]: I0930 09:58:02.337108 4730 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 30 09:58:32 crc kubenswrapper[4730]: I0930 09:58:32.337121 4730 patch_prober.go:28] interesting pod/machine-config-daemon-d4zf9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 30 09:58:32 crc kubenswrapper[4730]: I0930 09:58:32.337857 4730 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 30 09:59:02 crc kubenswrapper[4730]: I0930 09:59:02.336593 4730 patch_prober.go:28] interesting pod/machine-config-daemon-d4zf9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 30 09:59:02 crc kubenswrapper[4730]: I0930 09:59:02.337234 4730 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 09:59:02 crc kubenswrapper[4730]: I0930 09:59:02.337294 4730 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" Sep 30 09:59:02 crc kubenswrapper[4730]: I0930 09:59:02.338039 4730 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"dafc5f81114ac6c819cc935ab6256c7275b10fad50ffbd38ad7d62bed43ffa86"} pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 09:59:02 crc kubenswrapper[4730]: I0930 09:59:02.338115 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerName="machine-config-daemon" containerID="cri-o://dafc5f81114ac6c819cc935ab6256c7275b10fad50ffbd38ad7d62bed43ffa86" gracePeriod=600 Sep 30 09:59:02 crc kubenswrapper[4730]: I0930 09:59:02.945750 4730 generic.go:334] "Generic (PLEG): container finished" podID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerID="dafc5f81114ac6c819cc935ab6256c7275b10fad50ffbd38ad7d62bed43ffa86" exitCode=0 Sep 30 09:59:02 crc kubenswrapper[4730]: I0930 09:59:02.945788 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" event={"ID":"95bd4436-8399-478d-9552-c9ba5ae8f327","Type":"ContainerDied","Data":"dafc5f81114ac6c819cc935ab6256c7275b10fad50ffbd38ad7d62bed43ffa86"} Sep 30 09:59:02 crc kubenswrapper[4730]: I0930 09:59:02.946274 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" event={"ID":"95bd4436-8399-478d-9552-c9ba5ae8f327","Type":"ContainerStarted","Data":"e4ef153cbbd5d6d6260e417ec2d4e0d4bbc0012c9d4b4d0945d491a415dda27d"} Sep 30 09:59:02 crc kubenswrapper[4730]: I0930 09:59:02.946334 4730 scope.go:117] "RemoveContainer" containerID="329b3ada6780543f63d35d6db2f1dc9bd16e3f7ca3e03a686699b3e9535f2065" Sep 30 09:59:15 crc kubenswrapper[4730]: I0930 09:59:15.052193 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-9lmxl"] Sep 30 09:59:15 crc kubenswrapper[4730]: E0930 09:59:15.053042 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3f90d05f-7820-4af0-8894-6d63dc672f33" containerName="registry" Sep 30 09:59:15 crc kubenswrapper[4730]: I0930 09:59:15.053059 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="3f90d05f-7820-4af0-8894-6d63dc672f33" containerName="registry" Sep 30 09:59:15 crc kubenswrapper[4730]: I0930 09:59:15.053189 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="3f90d05f-7820-4af0-8894-6d63dc672f33" containerName="registry" Sep 30 09:59:15 crc kubenswrapper[4730]: I0930 09:59:15.053571 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-9lmxl" Sep 30 09:59:15 crc kubenswrapper[4730]: I0930 09:59:15.055918 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt" Sep 30 09:59:15 crc kubenswrapper[4730]: I0930 09:59:15.056089 4730 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-xlpm5" Sep 30 09:59:15 crc kubenswrapper[4730]: I0930 09:59:15.056105 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt" Sep 30 09:59:15 crc kubenswrapper[4730]: I0930 09:59:15.069556 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-9lmxl"] Sep 30 09:59:15 crc kubenswrapper[4730]: I0930 09:59:15.079568 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-5b446d88c5-k5vht"] Sep 30 09:59:15 crc kubenswrapper[4730]: I0930 09:59:15.080288 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-k5vht" Sep 30 09:59:15 crc kubenswrapper[4730]: I0930 09:59:15.083275 4730 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-xw9dd" Sep 30 09:59:15 crc kubenswrapper[4730]: I0930 09:59:15.088897 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-p92bh"] Sep 30 09:59:15 crc kubenswrapper[4730]: I0930 09:59:15.089698 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-p92bh" Sep 30 09:59:15 crc kubenswrapper[4730]: I0930 09:59:15.091448 4730 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-rhg6b" Sep 30 09:59:15 crc kubenswrapper[4730]: I0930 09:59:15.114373 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-p92bh"] Sep 30 09:59:15 crc kubenswrapper[4730]: I0930 09:59:15.150761 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-k5vht"] Sep 30 09:59:15 crc kubenswrapper[4730]: I0930 09:59:15.159470 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jp99w\" (UniqueName: \"kubernetes.io/projected/687b4645-7901-4987-adde-e3db6b502a52-kube-api-access-jp99w\") pod \"cert-manager-webhook-5655c58dd6-p92bh\" (UID: \"687b4645-7901-4987-adde-e3db6b502a52\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-p92bh" Sep 30 09:59:15 crc kubenswrapper[4730]: I0930 09:59:15.159548 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8qlkg\" (UniqueName: \"kubernetes.io/projected/66889f3c-938c-46e9-a430-801bb731b19e-kube-api-access-8qlkg\") pod \"cert-manager-5b446d88c5-k5vht\" (UID: \"66889f3c-938c-46e9-a430-801bb731b19e\") " pod="cert-manager/cert-manager-5b446d88c5-k5vht" Sep 30 09:59:15 crc kubenswrapper[4730]: I0930 09:59:15.159597 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fw44h\" (UniqueName: \"kubernetes.io/projected/5461a244-b4c4-48fb-9590-ebc310a13761-kube-api-access-fw44h\") pod \"cert-manager-cainjector-7f985d654d-9lmxl\" (UID: \"5461a244-b4c4-48fb-9590-ebc310a13761\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-9lmxl" Sep 30 09:59:15 
crc kubenswrapper[4730]: I0930 09:59:15.261482 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jp99w\" (UniqueName: \"kubernetes.io/projected/687b4645-7901-4987-adde-e3db6b502a52-kube-api-access-jp99w\") pod \"cert-manager-webhook-5655c58dd6-p92bh\" (UID: \"687b4645-7901-4987-adde-e3db6b502a52\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-p92bh" Sep 30 09:59:15 crc kubenswrapper[4730]: I0930 09:59:15.261568 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8qlkg\" (UniqueName: \"kubernetes.io/projected/66889f3c-938c-46e9-a430-801bb731b19e-kube-api-access-8qlkg\") pod \"cert-manager-5b446d88c5-k5vht\" (UID: \"66889f3c-938c-46e9-a430-801bb731b19e\") " pod="cert-manager/cert-manager-5b446d88c5-k5vht" Sep 30 09:59:15 crc kubenswrapper[4730]: I0930 09:59:15.261627 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fw44h\" (UniqueName: \"kubernetes.io/projected/5461a244-b4c4-48fb-9590-ebc310a13761-kube-api-access-fw44h\") pod \"cert-manager-cainjector-7f985d654d-9lmxl\" (UID: \"5461a244-b4c4-48fb-9590-ebc310a13761\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-9lmxl" Sep 30 09:59:15 crc kubenswrapper[4730]: I0930 09:59:15.284955 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jp99w\" (UniqueName: \"kubernetes.io/projected/687b4645-7901-4987-adde-e3db6b502a52-kube-api-access-jp99w\") pod \"cert-manager-webhook-5655c58dd6-p92bh\" (UID: \"687b4645-7901-4987-adde-e3db6b502a52\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-p92bh" Sep 30 09:59:15 crc kubenswrapper[4730]: I0930 09:59:15.285195 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fw44h\" (UniqueName: \"kubernetes.io/projected/5461a244-b4c4-48fb-9590-ebc310a13761-kube-api-access-fw44h\") pod \"cert-manager-cainjector-7f985d654d-9lmxl\" (UID: \"5461a244-b4c4-48fb-9590-ebc310a13761\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-9lmxl" Sep 30 09:59:15 crc kubenswrapper[4730]: I0930 09:59:15.285431 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8qlkg\" (UniqueName: \"kubernetes.io/projected/66889f3c-938c-46e9-a430-801bb731b19e-kube-api-access-8qlkg\") pod \"cert-manager-5b446d88c5-k5vht\" (UID: \"66889f3c-938c-46e9-a430-801bb731b19e\") " pod="cert-manager/cert-manager-5b446d88c5-k5vht" Sep 30 09:59:15 crc kubenswrapper[4730]: I0930 09:59:15.371462 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-9lmxl" Sep 30 09:59:15 crc kubenswrapper[4730]: I0930 09:59:15.398425 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-k5vht" Sep 30 09:59:15 crc kubenswrapper[4730]: I0930 09:59:15.408832 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-p92bh" Sep 30 09:59:15 crc kubenswrapper[4730]: I0930 09:59:15.622172 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-k5vht"] Sep 30 09:59:15 crc kubenswrapper[4730]: I0930 09:59:15.637928 4730 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 30 09:59:15 crc kubenswrapper[4730]: I0930 09:59:15.855338 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-9lmxl"] Sep 30 09:59:15 crc kubenswrapper[4730]: I0930 09:59:15.858744 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-p92bh"] Sep 30 09:59:16 crc kubenswrapper[4730]: I0930 09:59:16.026931 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-9lmxl" event={"ID":"5461a244-b4c4-48fb-9590-ebc310a13761","Type":"ContainerStarted","Data":"c60ded99bcac8e115efbfeed0241ed2a158766ec30dcff4226a9b584166227fd"} Sep 30 09:59:16 crc kubenswrapper[4730]: I0930 09:59:16.027908 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-k5vht" event={"ID":"66889f3c-938c-46e9-a430-801bb731b19e","Type":"ContainerStarted","Data":"3231b86d2aad978b5210f04bc9a58ce17fd6b9968bf5768adacb7e63f4bdb304"} Sep 30 09:59:16 crc kubenswrapper[4730]: I0930 09:59:16.028856 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-p92bh" event={"ID":"687b4645-7901-4987-adde-e3db6b502a52","Type":"ContainerStarted","Data":"9f68d240f80bd18260b546d9ef37d785b2c5dddd09cda838ff93f21e3633bda3"} Sep 30 09:59:20 crc kubenswrapper[4730]: I0930 09:59:20.053910 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-9lmxl" event={"ID":"5461a244-b4c4-48fb-9590-ebc310a13761","Type":"ContainerStarted","Data":"34ab40758a96c0383eecc98720ce00b5c97e521c921281d3f94fcb7a60ca2c23"} Sep 30 09:59:20 crc kubenswrapper[4730]: I0930 09:59:20.055344 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-k5vht" event={"ID":"66889f3c-938c-46e9-a430-801bb731b19e","Type":"ContainerStarted","Data":"9cb9937b764805c3f055ebac7d299d323d43794a617e156bdc35e25e7bdbe2a2"} Sep 30 09:59:20 crc kubenswrapper[4730]: I0930 09:59:20.057714 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-p92bh" event={"ID":"687b4645-7901-4987-adde-e3db6b502a52","Type":"ContainerStarted","Data":"6611967d5157d644a850cf0f6717c8d95fb29ca8793f1644f2d15cc188a31479"} Sep 30 09:59:20 crc kubenswrapper[4730]: I0930 09:59:20.058167 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-5655c58dd6-p92bh" Sep 30 09:59:20 crc kubenswrapper[4730]: I0930 09:59:20.070865 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-7f985d654d-9lmxl" podStartSLOduration=1.655829271 podStartE2EDuration="5.070848085s" podCreationTimestamp="2025-09-30 09:59:15 +0000 UTC" firstStartedPulling="2025-09-30 09:59:15.861860336 +0000 UTC m=+600.195120329" lastFinishedPulling="2025-09-30 09:59:19.27687915 +0000 UTC m=+603.610139143" observedRunningTime="2025-09-30 09:59:20.065965594 +0000 UTC m=+604.399225587" watchObservedRunningTime="2025-09-30 09:59:20.070848085 +0000 UTC m=+604.404108078" Sep 30 
Sep 30 09:59:20 crc kubenswrapper[4730]: I0930 09:59:20.078296 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-5b446d88c5-k5vht" podStartSLOduration=1.4288803159999999 podStartE2EDuration="5.078278341s" podCreationTimestamp="2025-09-30 09:59:15 +0000 UTC" firstStartedPulling="2025-09-30 09:59:15.637673056 +0000 UTC m=+599.970933049" lastFinishedPulling="2025-09-30 09:59:19.287071081 +0000 UTC m=+603.620331074" observedRunningTime="2025-09-30 09:59:20.077200893 +0000 UTC m=+604.410460886" watchObservedRunningTime="2025-09-30 09:59:20.078278341 +0000 UTC m=+604.411538324"
Sep 30 09:59:20 crc kubenswrapper[4730]: I0930 09:59:20.095985 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-5655c58dd6-p92bh" podStartSLOduration=1.677597048 podStartE2EDuration="5.095967481s" podCreationTimestamp="2025-09-30 09:59:15 +0000 UTC" firstStartedPulling="2025-09-30 09:59:15.865595325 +0000 UTC m=+600.198855318" lastFinishedPulling="2025-09-30 09:59:19.283965758 +0000 UTC m=+603.617225751" observedRunningTime="2025-09-30 09:59:20.09254642 +0000 UTC m=+604.425806423" watchObservedRunningTime="2025-09-30 09:59:20.095967481 +0000 UTC m=+604.429227474"
Sep 30 09:59:25 crc kubenswrapper[4730]: I0930 09:59:25.412416 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-5655c58dd6-p92bh"
Sep 30 09:59:25 crc kubenswrapper[4730]: I0930 09:59:25.641377 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-c5vmh"]
Sep 30 09:59:25 crc kubenswrapper[4730]: I0930 09:59:25.641887 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" podUID="823c4c28-801d-421e-b15f-02a17e300753" containerName="ovn-controller" containerID="cri-o://0d525f8baccabf7bc3b8067dbbc59d76851878ccc48a9fbe61c637b6fe57e01d" gracePeriod=30
Sep 30 09:59:25 crc kubenswrapper[4730]: I0930 09:59:25.641916 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" podUID="823c4c28-801d-421e-b15f-02a17e300753" containerName="sbdb" containerID="cri-o://59538b49ec4037e4bd1ccb035a0263f32db2504037aca2ab484dc9323f80b5d2" gracePeriod=30
Sep 30 09:59:25 crc kubenswrapper[4730]: I0930 09:59:25.641954 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" podUID="823c4c28-801d-421e-b15f-02a17e300753" containerName="nbdb" containerID="cri-o://7d99f5927ded358669fa88d47fb6a8d8bad2808bfdd07920056227828e478eda" gracePeriod=30
Sep 30 09:59:25 crc kubenswrapper[4730]: I0930 09:59:25.642023 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" podUID="823c4c28-801d-421e-b15f-02a17e300753" containerName="northd" containerID="cri-o://6fbe6a18bf72a902960dfc551de099c3956696ca5ac78ff5afc30a93768be323" gracePeriod=30
Sep 30 09:59:25 crc kubenswrapper[4730]: I0930 09:59:25.641977 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" podUID="823c4c28-801d-421e-b15f-02a17e300753" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://6f988662b16857e74e7c88eb9e69279e7b5807aa2831cdd6ed9a7cab0a6b2494" gracePeriod=30
Sep 30 09:59:25 crc kubenswrapper[4730]: I0930 09:59:25.642048 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" podUID="823c4c28-801d-421e-b15f-02a17e300753" containerName="ovn-acl-logging" containerID="cri-o://bd7abb6daba8cf38e3d8cc62ab526acb2fe714e07477eb5d1ecf77e145cf60dc" gracePeriod=30
Sep 30 09:59:25 crc kubenswrapper[4730]: I0930 09:59:25.642035 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" podUID="823c4c28-801d-421e-b15f-02a17e300753" containerName="kube-rbac-proxy-node" containerID="cri-o://566f950ac85f82874c332e01386898784e3b3e37ee3c422f8b73f6e732cd4541" gracePeriod=30
Sep 30 09:59:25 crc kubenswrapper[4730]: I0930 09:59:25.681722 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" podUID="823c4c28-801d-421e-b15f-02a17e300753" containerName="ovnkube-controller" containerID="cri-o://f2cbd154ce1dde6b387c050a30b9930a0c6d6cd1faab1f0a03af36431025789b" gracePeriod=30
Sep 30 09:59:25 crc kubenswrapper[4730]: I0930 09:59:25.950691 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-c5vmh_823c4c28-801d-421e-b15f-02a17e300753/ovnkube-controller/3.log"
Sep 30 09:59:25 crc kubenswrapper[4730]: I0930 09:59:25.952670 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-c5vmh_823c4c28-801d-421e-b15f-02a17e300753/ovn-acl-logging/0.log"
Sep 30 09:59:25 crc kubenswrapper[4730]: I0930 09:59:25.953124 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-c5vmh_823c4c28-801d-421e-b15f-02a17e300753/ovn-controller/0.log"
Sep 30 09:59:25 crc kubenswrapper[4730]: I0930 09:59:25.953518 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh"
Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.005405 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-wd5fh"]
Sep 30 09:59:26 crc kubenswrapper[4730]: E0930 09:59:26.005633 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="823c4c28-801d-421e-b15f-02a17e300753" containerName="ovn-controller"
Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.005648 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="823c4c28-801d-421e-b15f-02a17e300753" containerName="ovn-controller"
Sep 30 09:59:26 crc kubenswrapper[4730]: E0930 09:59:26.005659 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="823c4c28-801d-421e-b15f-02a17e300753" containerName="ovnkube-controller"
Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.005666 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="823c4c28-801d-421e-b15f-02a17e300753" containerName="ovnkube-controller"
Sep 30 09:59:26 crc kubenswrapper[4730]: E0930 09:59:26.005676 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="823c4c28-801d-421e-b15f-02a17e300753" containerName="nbdb"
Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.005682 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="823c4c28-801d-421e-b15f-02a17e300753" containerName="nbdb"
Sep 30 09:59:26 crc kubenswrapper[4730]: E0930 09:59:26.005692 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="823c4c28-801d-421e-b15f-02a17e300753" containerName="kube-rbac-proxy-ovn-metrics"
Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.005699 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="823c4c28-801d-421e-b15f-02a17e300753" containerName="kube-rbac-proxy-ovn-metrics"
Sep 30 09:59:26 crc kubenswrapper[4730]: E0930 09:59:26.005710 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="823c4c28-801d-421e-b15f-02a17e300753" containerName="kubecfg-setup"
Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.005716 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="823c4c28-801d-421e-b15f-02a17e300753" containerName="kubecfg-setup"
Sep 30 09:59:26 crc kubenswrapper[4730]: E0930 09:59:26.005724 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="823c4c28-801d-421e-b15f-02a17e300753" containerName="sbdb"
Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.005731 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="823c4c28-801d-421e-b15f-02a17e300753" containerName="sbdb"
Sep 30 09:59:26 crc kubenswrapper[4730]: E0930 09:59:26.005741 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="823c4c28-801d-421e-b15f-02a17e300753" containerName="ovnkube-controller"
Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.005747 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="823c4c28-801d-421e-b15f-02a17e300753" containerName="ovnkube-controller"
Sep 30 09:59:26 crc kubenswrapper[4730]: E0930 09:59:26.005756 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="823c4c28-801d-421e-b15f-02a17e300753" containerName="kube-rbac-proxy-node"
Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.005762 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="823c4c28-801d-421e-b15f-02a17e300753" containerName="kube-rbac-proxy-node"
Sep 30 09:59:26 crc kubenswrapper[4730]: E0930 09:59:26.005769 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="823c4c28-801d-421e-b15f-02a17e300753" containerName="ovnkube-controller"
Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.005775 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="823c4c28-801d-421e-b15f-02a17e300753" containerName="ovnkube-controller"
Sep 30 09:59:26 crc kubenswrapper[4730]: E0930 09:59:26.005784 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="823c4c28-801d-421e-b15f-02a17e300753" containerName="northd"
Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.005789 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="823c4c28-801d-421e-b15f-02a17e300753" containerName="northd"
Sep 30 09:59:26 crc kubenswrapper[4730]: E0930 09:59:26.005799 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="823c4c28-801d-421e-b15f-02a17e300753" containerName="ovn-acl-logging"
Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.005805 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="823c4c28-801d-421e-b15f-02a17e300753" containerName="ovn-acl-logging"
Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.005892 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="823c4c28-801d-421e-b15f-02a17e300753" containerName="ovnkube-controller"
Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.005900 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="823c4c28-801d-421e-b15f-02a17e300753" containerName="kube-rbac-proxy-node"
Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.005909 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="823c4c28-801d-421e-b15f-02a17e300753" containerName="kube-rbac-proxy-ovn-metrics"
Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.005917 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="823c4c28-801d-421e-b15f-02a17e300753" containerName="ovnkube-controller"
Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.005924 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="823c4c28-801d-421e-b15f-02a17e300753" containerName="ovn-acl-logging"
Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.005931 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="823c4c28-801d-421e-b15f-02a17e300753" containerName="northd"
Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.005937 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="823c4c28-801d-421e-b15f-02a17e300753" containerName="ovnkube-controller"
Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.005944 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="823c4c28-801d-421e-b15f-02a17e300753" containerName="nbdb"
Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.005954 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="823c4c28-801d-421e-b15f-02a17e300753" containerName="sbdb"
Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.005961 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="823c4c28-801d-421e-b15f-02a17e300753" containerName="ovn-controller"
Sep 30 09:59:26 crc kubenswrapper[4730]: E0930 09:59:26.006045 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="823c4c28-801d-421e-b15f-02a17e300753" containerName="ovnkube-controller"
Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.006054 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="823c4c28-801d-421e-b15f-02a17e300753" containerName="ovnkube-controller"
Sep 30 09:59:26 crc kubenswrapper[4730]: E0930 09:59:26.006063 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="823c4c28-801d-421e-b15f-02a17e300753" containerName="ovnkube-controller"
Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.006069 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="823c4c28-801d-421e-b15f-02a17e300753" containerName="ovnkube-controller"
Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.006165 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="823c4c28-801d-421e-b15f-02a17e300753" containerName="ovnkube-controller"
Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.006323 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="823c4c28-801d-421e-b15f-02a17e300753" containerName="ovnkube-controller"
Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.007808 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-wd5fh"
Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.096597 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-t2frc_98a6f8df-1ac8-4652-8074-90cb180311ad/kube-multus/2.log"
Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.097225 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-t2frc_98a6f8df-1ac8-4652-8074-90cb180311ad/kube-multus/1.log"
Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.097279 4730 generic.go:334] "Generic (PLEG): container finished" podID="98a6f8df-1ac8-4652-8074-90cb180311ad" containerID="a5bb1f559693666f8926ed88798ba0efed5cfc3fb9c465817367617f57eaf858" exitCode=2
Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.097349 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-t2frc" event={"ID":"98a6f8df-1ac8-4652-8074-90cb180311ad","Type":"ContainerDied","Data":"a5bb1f559693666f8926ed88798ba0efed5cfc3fb9c465817367617f57eaf858"}
Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.097386 4730 scope.go:117] "RemoveContainer" containerID="9ed00cdb0dca4ffa70594334507af7834b99fd93be39a245a3b569f39154c2a1"
Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.098207 4730 scope.go:117] "RemoveContainer" containerID="a5bb1f559693666f8926ed88798ba0efed5cfc3fb9c465817367617f57eaf858"
Sep 30 09:59:26 crc kubenswrapper[4730]: E0930 09:59:26.098512 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-t2frc_openshift-multus(98a6f8df-1ac8-4652-8074-90cb180311ad)\"" pod="openshift-multus/multus-t2frc" podUID="98a6f8df-1ac8-4652-8074-90cb180311ad"
Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.101260 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-c5vmh_823c4c28-801d-421e-b15f-02a17e300753/ovnkube-controller/3.log"
Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.104175 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-c5vmh_823c4c28-801d-421e-b15f-02a17e300753/ovn-acl-logging/0.log"
Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.104949 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-c5vmh_823c4c28-801d-421e-b15f-02a17e300753/ovn-controller/0.log"
Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.105572 4730 generic.go:334] "Generic (PLEG): container finished" podID="823c4c28-801d-421e-b15f-02a17e300753" containerID="f2cbd154ce1dde6b387c050a30b9930a0c6d6cd1faab1f0a03af36431025789b" exitCode=0
Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.105625 4730 generic.go:334] "Generic (PLEG): container finished" podID="823c4c28-801d-421e-b15f-02a17e300753" containerID="59538b49ec4037e4bd1ccb035a0263f32db2504037aca2ab484dc9323f80b5d2" exitCode=0
Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.105635 4730 generic.go:334] "Generic (PLEG): container finished" podID="823c4c28-801d-421e-b15f-02a17e300753" containerID="7d99f5927ded358669fa88d47fb6a8d8bad2808bfdd07920056227828e478eda" exitCode=0
Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.105639 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" event={"ID":"823c4c28-801d-421e-b15f-02a17e300753","Type":"ContainerDied","Data":"f2cbd154ce1dde6b387c050a30b9930a0c6d6cd1faab1f0a03af36431025789b"}
Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.105685 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" event={"ID":"823c4c28-801d-421e-b15f-02a17e300753","Type":"ContainerDied","Data":"59538b49ec4037e4bd1ccb035a0263f32db2504037aca2ab484dc9323f80b5d2"}
Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.105699 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" event={"ID":"823c4c28-801d-421e-b15f-02a17e300753","Type":"ContainerDied","Data":"7d99f5927ded358669fa88d47fb6a8d8bad2808bfdd07920056227828e478eda"}
Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.105712 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" event={"ID":"823c4c28-801d-421e-b15f-02a17e300753","Type":"ContainerDied","Data":"6fbe6a18bf72a902960dfc551de099c3956696ca5ac78ff5afc30a93768be323"}
Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.105647 4730 generic.go:334] "Generic (PLEG): container finished" podID="823c4c28-801d-421e-b15f-02a17e300753" containerID="6fbe6a18bf72a902960dfc551de099c3956696ca5ac78ff5afc30a93768be323" exitCode=0
Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.105736 4730 generic.go:334] "Generic (PLEG): container finished" podID="823c4c28-801d-421e-b15f-02a17e300753" containerID="6f988662b16857e74e7c88eb9e69279e7b5807aa2831cdd6ed9a7cab0a6b2494" exitCode=0
Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.105749 4730 generic.go:334] "Generic (PLEG): container finished" podID="823c4c28-801d-421e-b15f-02a17e300753" containerID="566f950ac85f82874c332e01386898784e3b3e37ee3c422f8b73f6e732cd4541" exitCode=0
Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.105758 4730 generic.go:334] "Generic (PLEG): container finished" podID="823c4c28-801d-421e-b15f-02a17e300753" containerID="bd7abb6daba8cf38e3d8cc62ab526acb2fe714e07477eb5d1ecf77e145cf60dc" exitCode=143
Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.105767 4730 generic.go:334] "Generic (PLEG): container finished" podID="823c4c28-801d-421e-b15f-02a17e300753" containerID="0d525f8baccabf7bc3b8067dbbc59d76851878ccc48a9fbe61c637b6fe57e01d" exitCode=143
Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.105766 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh"
Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.105784 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" event={"ID":"823c4c28-801d-421e-b15f-02a17e300753","Type":"ContainerDied","Data":"6f988662b16857e74e7c88eb9e69279e7b5807aa2831cdd6ed9a7cab0a6b2494"}
Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.105870 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" event={"ID":"823c4c28-801d-421e-b15f-02a17e300753","Type":"ContainerDied","Data":"566f950ac85f82874c332e01386898784e3b3e37ee3c422f8b73f6e732cd4541"}
Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.105882 4730 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f2cbd154ce1dde6b387c050a30b9930a0c6d6cd1faab1f0a03af36431025789b"}
Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.105892 4730 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"380a34335df704ec5f2fb810525f314739ef1efe35a54dd9e23f95b66389a669"}
Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.105899 4730 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"59538b49ec4037e4bd1ccb035a0263f32db2504037aca2ab484dc9323f80b5d2"}
Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.105904 4730 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7d99f5927ded358669fa88d47fb6a8d8bad2808bfdd07920056227828e478eda"}
Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.105909 4730 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6fbe6a18bf72a902960dfc551de099c3956696ca5ac78ff5afc30a93768be323"}
Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.105914 4730 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6f988662b16857e74e7c88eb9e69279e7b5807aa2831cdd6ed9a7cab0a6b2494"}
Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.105919 4730 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"566f950ac85f82874c332e01386898784e3b3e37ee3c422f8b73f6e732cd4541"}
Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.105924 4730 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"bd7abb6daba8cf38e3d8cc62ab526acb2fe714e07477eb5d1ecf77e145cf60dc"}
Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.105929 4730 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0d525f8baccabf7bc3b8067dbbc59d76851878ccc48a9fbe61c637b6fe57e01d"}
Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.105935 4730 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"e4fcd3f6fd645e486e741ae4ddea7a8669c849d729860c651131da25638a393f"}
Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.105942 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" event={"ID":"823c4c28-801d-421e-b15f-02a17e300753","Type":"ContainerDied","Data":"bd7abb6daba8cf38e3d8cc62ab526acb2fe714e07477eb5d1ecf77e145cf60dc"}
Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.105949 4730 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f2cbd154ce1dde6b387c050a30b9930a0c6d6cd1faab1f0a03af36431025789b"}
Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.105958 4730 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"380a34335df704ec5f2fb810525f314739ef1efe35a54dd9e23f95b66389a669"}
Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.105963 4730 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"59538b49ec4037e4bd1ccb035a0263f32db2504037aca2ab484dc9323f80b5d2"}
Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.105968 4730 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7d99f5927ded358669fa88d47fb6a8d8bad2808bfdd07920056227828e478eda"}
Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.105973 4730 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6fbe6a18bf72a902960dfc551de099c3956696ca5ac78ff5afc30a93768be323"}
Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.105978 4730 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6f988662b16857e74e7c88eb9e69279e7b5807aa2831cdd6ed9a7cab0a6b2494"}
Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.105983 4730 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"566f950ac85f82874c332e01386898784e3b3e37ee3c422f8b73f6e732cd4541"}
Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.105988 4730 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"bd7abb6daba8cf38e3d8cc62ab526acb2fe714e07477eb5d1ecf77e145cf60dc"}
Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.105992 4730 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0d525f8baccabf7bc3b8067dbbc59d76851878ccc48a9fbe61c637b6fe57e01d"}
Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.105997 4730 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"e4fcd3f6fd645e486e741ae4ddea7a8669c849d729860c651131da25638a393f"}
Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.106004 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" event={"ID":"823c4c28-801d-421e-b15f-02a17e300753","Type":"ContainerDied","Data":"0d525f8baccabf7bc3b8067dbbc59d76851878ccc48a9fbe61c637b6fe57e01d"}
Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.106013 4730 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f2cbd154ce1dde6b387c050a30b9930a0c6d6cd1faab1f0a03af36431025789b"}
Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.106019 4730 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"380a34335df704ec5f2fb810525f314739ef1efe35a54dd9e23f95b66389a669"}
Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.106025 4730 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"59538b49ec4037e4bd1ccb035a0263f32db2504037aca2ab484dc9323f80b5d2"}
Sep 30 09:59:26 crc 
kubenswrapper[4730]: I0930 09:59:26.106031 4730 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7d99f5927ded358669fa88d47fb6a8d8bad2808bfdd07920056227828e478eda"} Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.106037 4730 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6fbe6a18bf72a902960dfc551de099c3956696ca5ac78ff5afc30a93768be323"} Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.106043 4730 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6f988662b16857e74e7c88eb9e69279e7b5807aa2831cdd6ed9a7cab0a6b2494"} Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.106049 4730 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"566f950ac85f82874c332e01386898784e3b3e37ee3c422f8b73f6e732cd4541"} Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.106055 4730 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"bd7abb6daba8cf38e3d8cc62ab526acb2fe714e07477eb5d1ecf77e145cf60dc"} Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.106062 4730 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0d525f8baccabf7bc3b8067dbbc59d76851878ccc48a9fbe61c637b6fe57e01d"} Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.106068 4730 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"e4fcd3f6fd645e486e741ae4ddea7a8669c849d729860c651131da25638a393f"} Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.106075 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5vmh" event={"ID":"823c4c28-801d-421e-b15f-02a17e300753","Type":"ContainerDied","Data":"b495b3733f13e0b9f0b0ad95249d30857e31a0f6e908b5ff564a45f562b85018"} Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.106084 4730 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f2cbd154ce1dde6b387c050a30b9930a0c6d6cd1faab1f0a03af36431025789b"} Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.106092 4730 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"380a34335df704ec5f2fb810525f314739ef1efe35a54dd9e23f95b66389a669"} Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.106097 4730 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"59538b49ec4037e4bd1ccb035a0263f32db2504037aca2ab484dc9323f80b5d2"} Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.106102 4730 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7d99f5927ded358669fa88d47fb6a8d8bad2808bfdd07920056227828e478eda"} Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.106108 4730 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6fbe6a18bf72a902960dfc551de099c3956696ca5ac78ff5afc30a93768be323"} Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.106113 4730 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6f988662b16857e74e7c88eb9e69279e7b5807aa2831cdd6ed9a7cab0a6b2494"} Sep 30 09:59:26 crc 
kubenswrapper[4730]: I0930 09:59:26.106118 4730 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"566f950ac85f82874c332e01386898784e3b3e37ee3c422f8b73f6e732cd4541"} Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.106123 4730 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"bd7abb6daba8cf38e3d8cc62ab526acb2fe714e07477eb5d1ecf77e145cf60dc"} Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.106128 4730 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0d525f8baccabf7bc3b8067dbbc59d76851878ccc48a9fbe61c637b6fe57e01d"} Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.106133 4730 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"e4fcd3f6fd645e486e741ae4ddea7a8669c849d729860c651131da25638a393f"} Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.124476 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/823c4c28-801d-421e-b15f-02a17e300753-ovnkube-script-lib\") pod \"823c4c28-801d-421e-b15f-02a17e300753\" (UID: \"823c4c28-801d-421e-b15f-02a17e300753\") " Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.124573 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/823c4c28-801d-421e-b15f-02a17e300753-log-socket\") pod \"823c4c28-801d-421e-b15f-02a17e300753\" (UID: \"823c4c28-801d-421e-b15f-02a17e300753\") " Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.124671 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/823c4c28-801d-421e-b15f-02a17e300753-host-cni-netd\") pod \"823c4c28-801d-421e-b15f-02a17e300753\" (UID: \"823c4c28-801d-421e-b15f-02a17e300753\") " Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.124708 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/823c4c28-801d-421e-b15f-02a17e300753-host-run-ovn-kubernetes\") pod \"823c4c28-801d-421e-b15f-02a17e300753\" (UID: \"823c4c28-801d-421e-b15f-02a17e300753\") " Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.124751 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/823c4c28-801d-421e-b15f-02a17e300753-systemd-units\") pod \"823c4c28-801d-421e-b15f-02a17e300753\" (UID: \"823c4c28-801d-421e-b15f-02a17e300753\") " Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.124774 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/823c4c28-801d-421e-b15f-02a17e300753-host-run-netns\") pod \"823c4c28-801d-421e-b15f-02a17e300753\" (UID: \"823c4c28-801d-421e-b15f-02a17e300753\") " Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.124798 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/823c4c28-801d-421e-b15f-02a17e300753-node-log\") pod \"823c4c28-801d-421e-b15f-02a17e300753\" (UID: \"823c4c28-801d-421e-b15f-02a17e300753\") " Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.124828 4730 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/823c4c28-801d-421e-b15f-02a17e300753-var-lib-openvswitch\") pod \"823c4c28-801d-421e-b15f-02a17e300753\" (UID: \"823c4c28-801d-421e-b15f-02a17e300753\") " Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.124863 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/823c4c28-801d-421e-b15f-02a17e300753-host-cni-bin\") pod \"823c4c28-801d-421e-b15f-02a17e300753\" (UID: \"823c4c28-801d-421e-b15f-02a17e300753\") " Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.124882 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/823c4c28-801d-421e-b15f-02a17e300753-ovn-node-metrics-cert\") pod \"823c4c28-801d-421e-b15f-02a17e300753\" (UID: \"823c4c28-801d-421e-b15f-02a17e300753\") " Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.124906 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/823c4c28-801d-421e-b15f-02a17e300753-host-var-lib-cni-networks-ovn-kubernetes\") pod \"823c4c28-801d-421e-b15f-02a17e300753\" (UID: \"823c4c28-801d-421e-b15f-02a17e300753\") " Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.124893 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/823c4c28-801d-421e-b15f-02a17e300753-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "823c4c28-801d-421e-b15f-02a17e300753" (UID: "823c4c28-801d-421e-b15f-02a17e300753"). InnerVolumeSpecName "host-run-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.124933 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/823c4c28-801d-421e-b15f-02a17e300753-etc-openvswitch\") pod \"823c4c28-801d-421e-b15f-02a17e300753\" (UID: \"823c4c28-801d-421e-b15f-02a17e300753\") " Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.125008 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/823c4c28-801d-421e-b15f-02a17e300753-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "823c4c28-801d-421e-b15f-02a17e300753" (UID: "823c4c28-801d-421e-b15f-02a17e300753"). InnerVolumeSpecName "etc-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.125025 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jfshb\" (UniqueName: \"kubernetes.io/projected/823c4c28-801d-421e-b15f-02a17e300753-kube-api-access-jfshb\") pod \"823c4c28-801d-421e-b15f-02a17e300753\" (UID: \"823c4c28-801d-421e-b15f-02a17e300753\") " Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.125043 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/823c4c28-801d-421e-b15f-02a17e300753-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "823c4c28-801d-421e-b15f-02a17e300753" (UID: "823c4c28-801d-421e-b15f-02a17e300753"). InnerVolumeSpecName "ovnkube-script-lib". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.125055 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/823c4c28-801d-421e-b15f-02a17e300753-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "823c4c28-801d-421e-b15f-02a17e300753" (UID: "823c4c28-801d-421e-b15f-02a17e300753"). InnerVolumeSpecName "systemd-units". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.125070 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/823c4c28-801d-421e-b15f-02a17e300753-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "823c4c28-801d-421e-b15f-02a17e300753" (UID: "823c4c28-801d-421e-b15f-02a17e300753"). InnerVolumeSpecName "host-run-netns". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.125085 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/823c4c28-801d-421e-b15f-02a17e300753-node-log" (OuterVolumeSpecName: "node-log") pod "823c4c28-801d-421e-b15f-02a17e300753" (UID: "823c4c28-801d-421e-b15f-02a17e300753"). InnerVolumeSpecName "node-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.125101 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/823c4c28-801d-421e-b15f-02a17e300753-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "823c4c28-801d-421e-b15f-02a17e300753" (UID: "823c4c28-801d-421e-b15f-02a17e300753"). InnerVolumeSpecName "var-lib-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.125102 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/823c4c28-801d-421e-b15f-02a17e300753-host-kubelet\") pod \"823c4c28-801d-421e-b15f-02a17e300753\" (UID: \"823c4c28-801d-421e-b15f-02a17e300753\") " Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.125128 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/823c4c28-801d-421e-b15f-02a17e300753-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "823c4c28-801d-421e-b15f-02a17e300753" (UID: "823c4c28-801d-421e-b15f-02a17e300753"). InnerVolumeSpecName "host-cni-bin". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.125132 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/823c4c28-801d-421e-b15f-02a17e300753-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "823c4c28-801d-421e-b15f-02a17e300753" (UID: "823c4c28-801d-421e-b15f-02a17e300753"). InnerVolumeSpecName "host-cni-netd". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.125139 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/823c4c28-801d-421e-b15f-02a17e300753-run-ovn\") pod \"823c4c28-801d-421e-b15f-02a17e300753\" (UID: \"823c4c28-801d-421e-b15f-02a17e300753\") " Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.125164 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/823c4c28-801d-421e-b15f-02a17e300753-log-socket" (OuterVolumeSpecName: "log-socket") pod "823c4c28-801d-421e-b15f-02a17e300753" (UID: "823c4c28-801d-421e-b15f-02a17e300753"). InnerVolumeSpecName "log-socket". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.125208 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/823c4c28-801d-421e-b15f-02a17e300753-run-systemd\") pod \"823c4c28-801d-421e-b15f-02a17e300753\" (UID: \"823c4c28-801d-421e-b15f-02a17e300753\") " Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.125260 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/823c4c28-801d-421e-b15f-02a17e300753-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "823c4c28-801d-421e-b15f-02a17e300753" (UID: "823c4c28-801d-421e-b15f-02a17e300753"). InnerVolumeSpecName "host-kubelet". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.125277 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/823c4c28-801d-421e-b15f-02a17e300753-env-overrides\") pod \"823c4c28-801d-421e-b15f-02a17e300753\" (UID: \"823c4c28-801d-421e-b15f-02a17e300753\") " Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.125302 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/823c4c28-801d-421e-b15f-02a17e300753-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "823c4c28-801d-421e-b15f-02a17e300753" (UID: "823c4c28-801d-421e-b15f-02a17e300753"). InnerVolumeSpecName "run-ovn". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.125356 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/823c4c28-801d-421e-b15f-02a17e300753-host-slash\") pod \"823c4c28-801d-421e-b15f-02a17e300753\" (UID: \"823c4c28-801d-421e-b15f-02a17e300753\") " Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.125402 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/823c4c28-801d-421e-b15f-02a17e300753-ovnkube-config\") pod \"823c4c28-801d-421e-b15f-02a17e300753\" (UID: \"823c4c28-801d-421e-b15f-02a17e300753\") " Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.125429 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/823c4c28-801d-421e-b15f-02a17e300753-run-openvswitch\") pod \"823c4c28-801d-421e-b15f-02a17e300753\" (UID: \"823c4c28-801d-421e-b15f-02a17e300753\") " Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.125456 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/823c4c28-801d-421e-b15f-02a17e300753-host-slash" (OuterVolumeSpecName: "host-slash") pod "823c4c28-801d-421e-b15f-02a17e300753" (UID: "823c4c28-801d-421e-b15f-02a17e300753"). InnerVolumeSpecName "host-slash". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.125407 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/823c4c28-801d-421e-b15f-02a17e300753-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "823c4c28-801d-421e-b15f-02a17e300753" (UID: "823c4c28-801d-421e-b15f-02a17e300753"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.125583 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/823c4c28-801d-421e-b15f-02a17e300753-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "823c4c28-801d-421e-b15f-02a17e300753" (UID: "823c4c28-801d-421e-b15f-02a17e300753"). InnerVolumeSpecName "run-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.125743 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/823c4c28-801d-421e-b15f-02a17e300753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "823c4c28-801d-421e-b15f-02a17e300753" (UID: "823c4c28-801d-421e-b15f-02a17e300753"). InnerVolumeSpecName "env-overrides". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.125779 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/73638073-8498-4c61-9f0c-be7a6ef866fe-systemd-units\") pod \"ovnkube-node-wd5fh\" (UID: \"73638073-8498-4c61-9f0c-be7a6ef866fe\") " pod="openshift-ovn-kubernetes/ovnkube-node-wd5fh" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.125835 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/73638073-8498-4c61-9f0c-be7a6ef866fe-node-log\") pod \"ovnkube-node-wd5fh\" (UID: \"73638073-8498-4c61-9f0c-be7a6ef866fe\") " pod="openshift-ovn-kubernetes/ovnkube-node-wd5fh" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.125883 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/823c4c28-801d-421e-b15f-02a17e300753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "823c4c28-801d-421e-b15f-02a17e300753" (UID: "823c4c28-801d-421e-b15f-02a17e300753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.125904 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/73638073-8498-4c61-9f0c-be7a6ef866fe-ovn-node-metrics-cert\") pod \"ovnkube-node-wd5fh\" (UID: \"73638073-8498-4c61-9f0c-be7a6ef866fe\") " pod="openshift-ovn-kubernetes/ovnkube-node-wd5fh" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.126022 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/73638073-8498-4c61-9f0c-be7a6ef866fe-host-run-ovn-kubernetes\") pod \"ovnkube-node-wd5fh\" (UID: \"73638073-8498-4c61-9f0c-be7a6ef866fe\") " pod="openshift-ovn-kubernetes/ovnkube-node-wd5fh" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.126063 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/73638073-8498-4c61-9f0c-be7a6ef866fe-run-openvswitch\") pod \"ovnkube-node-wd5fh\" (UID: \"73638073-8498-4c61-9f0c-be7a6ef866fe\") " pod="openshift-ovn-kubernetes/ovnkube-node-wd5fh" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.126218 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/73638073-8498-4c61-9f0c-be7a6ef866fe-etc-openvswitch\") pod \"ovnkube-node-wd5fh\" (UID: \"73638073-8498-4c61-9f0c-be7a6ef866fe\") " pod="openshift-ovn-kubernetes/ovnkube-node-wd5fh" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.126257 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/73638073-8498-4c61-9f0c-be7a6ef866fe-host-cni-netd\") pod \"ovnkube-node-wd5fh\" (UID: \"73638073-8498-4c61-9f0c-be7a6ef866fe\") " pod="openshift-ovn-kubernetes/ovnkube-node-wd5fh" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.126291 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: 
\"kubernetes.io/host-path/73638073-8498-4c61-9f0c-be7a6ef866fe-run-systemd\") pod \"ovnkube-node-wd5fh\" (UID: \"73638073-8498-4c61-9f0c-be7a6ef866fe\") " pod="openshift-ovn-kubernetes/ovnkube-node-wd5fh" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.126323 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/73638073-8498-4c61-9f0c-be7a6ef866fe-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-wd5fh\" (UID: \"73638073-8498-4c61-9f0c-be7a6ef866fe\") " pod="openshift-ovn-kubernetes/ovnkube-node-wd5fh" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.126379 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/73638073-8498-4c61-9f0c-be7a6ef866fe-log-socket\") pod \"ovnkube-node-wd5fh\" (UID: \"73638073-8498-4c61-9f0c-be7a6ef866fe\") " pod="openshift-ovn-kubernetes/ovnkube-node-wd5fh" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.126412 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/73638073-8498-4c61-9f0c-be7a6ef866fe-host-run-netns\") pod \"ovnkube-node-wd5fh\" (UID: \"73638073-8498-4c61-9f0c-be7a6ef866fe\") " pod="openshift-ovn-kubernetes/ovnkube-node-wd5fh" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.126500 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/73638073-8498-4c61-9f0c-be7a6ef866fe-run-ovn\") pod \"ovnkube-node-wd5fh\" (UID: \"73638073-8498-4c61-9f0c-be7a6ef866fe\") " pod="openshift-ovn-kubernetes/ovnkube-node-wd5fh" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.126525 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/73638073-8498-4c61-9f0c-be7a6ef866fe-host-cni-bin\") pod \"ovnkube-node-wd5fh\" (UID: \"73638073-8498-4c61-9f0c-be7a6ef866fe\") " pod="openshift-ovn-kubernetes/ovnkube-node-wd5fh" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.126568 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/73638073-8498-4c61-9f0c-be7a6ef866fe-var-lib-openvswitch\") pod \"ovnkube-node-wd5fh\" (UID: \"73638073-8498-4c61-9f0c-be7a6ef866fe\") " pod="openshift-ovn-kubernetes/ovnkube-node-wd5fh" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.126590 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/73638073-8498-4c61-9f0c-be7a6ef866fe-ovnkube-script-lib\") pod \"ovnkube-node-wd5fh\" (UID: \"73638073-8498-4c61-9f0c-be7a6ef866fe\") " pod="openshift-ovn-kubernetes/ovnkube-node-wd5fh" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.126688 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/73638073-8498-4c61-9f0c-be7a6ef866fe-host-kubelet\") pod \"ovnkube-node-wd5fh\" (UID: \"73638073-8498-4c61-9f0c-be7a6ef866fe\") " pod="openshift-ovn-kubernetes/ovnkube-node-wd5fh" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.126732 4730 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/73638073-8498-4c61-9f0c-be7a6ef866fe-ovnkube-config\") pod \"ovnkube-node-wd5fh\" (UID: \"73638073-8498-4c61-9f0c-be7a6ef866fe\") " pod="openshift-ovn-kubernetes/ovnkube-node-wd5fh" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.126761 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6h57t\" (UniqueName: \"kubernetes.io/projected/73638073-8498-4c61-9f0c-be7a6ef866fe-kube-api-access-6h57t\") pod \"ovnkube-node-wd5fh\" (UID: \"73638073-8498-4c61-9f0c-be7a6ef866fe\") " pod="openshift-ovn-kubernetes/ovnkube-node-wd5fh" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.126796 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/73638073-8498-4c61-9f0c-be7a6ef866fe-host-slash\") pod \"ovnkube-node-wd5fh\" (UID: \"73638073-8498-4c61-9f0c-be7a6ef866fe\") " pod="openshift-ovn-kubernetes/ovnkube-node-wd5fh" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.126826 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/73638073-8498-4c61-9f0c-be7a6ef866fe-env-overrides\") pod \"ovnkube-node-wd5fh\" (UID: \"73638073-8498-4c61-9f0c-be7a6ef866fe\") " pod="openshift-ovn-kubernetes/ovnkube-node-wd5fh" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.127007 4730 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/823c4c28-801d-421e-b15f-02a17e300753-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.127033 4730 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/823c4c28-801d-421e-b15f-02a17e300753-host-cni-bin\") on node \"crc\" DevicePath \"\"" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.127047 4730 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/823c4c28-801d-421e-b15f-02a17e300753-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.127062 4730 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/823c4c28-801d-421e-b15f-02a17e300753-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.127074 4730 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/823c4c28-801d-421e-b15f-02a17e300753-host-kubelet\") on node \"crc\" DevicePath \"\"" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.127086 4730 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/823c4c28-801d-421e-b15f-02a17e300753-run-ovn\") on node \"crc\" DevicePath \"\"" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.127098 4730 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/823c4c28-801d-421e-b15f-02a17e300753-env-overrides\") on node \"crc\" DevicePath \"\"" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.127112 4730 
reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/823c4c28-801d-421e-b15f-02a17e300753-host-slash\") on node \"crc\" DevicePath \"\"" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.127125 4730 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/823c4c28-801d-421e-b15f-02a17e300753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.127137 4730 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/823c4c28-801d-421e-b15f-02a17e300753-run-openvswitch\") on node \"crc\" DevicePath \"\"" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.127150 4730 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/823c4c28-801d-421e-b15f-02a17e300753-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.127162 4730 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/823c4c28-801d-421e-b15f-02a17e300753-log-socket\") on node \"crc\" DevicePath \"\"" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.127171 4730 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/823c4c28-801d-421e-b15f-02a17e300753-host-cni-netd\") on node \"crc\" DevicePath \"\"" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.127183 4730 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/823c4c28-801d-421e-b15f-02a17e300753-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.127195 4730 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/823c4c28-801d-421e-b15f-02a17e300753-systemd-units\") on node \"crc\" DevicePath \"\"" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.127205 4730 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/823c4c28-801d-421e-b15f-02a17e300753-host-run-netns\") on node \"crc\" DevicePath \"\"" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.127215 4730 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/823c4c28-801d-421e-b15f-02a17e300753-node-log\") on node \"crc\" DevicePath \"\"" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.128182 4730 scope.go:117] "RemoveContainer" containerID="f2cbd154ce1dde6b387c050a30b9930a0c6d6cd1faab1f0a03af36431025789b" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.131401 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/823c4c28-801d-421e-b15f-02a17e300753-kube-api-access-jfshb" (OuterVolumeSpecName: "kube-api-access-jfshb") pod "823c4c28-801d-421e-b15f-02a17e300753" (UID: "823c4c28-801d-421e-b15f-02a17e300753"). InnerVolumeSpecName "kube-api-access-jfshb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.131769 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/823c4c28-801d-421e-b15f-02a17e300753-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "823c4c28-801d-421e-b15f-02a17e300753" (UID: "823c4c28-801d-421e-b15f-02a17e300753"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.138956 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/823c4c28-801d-421e-b15f-02a17e300753-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "823c4c28-801d-421e-b15f-02a17e300753" (UID: "823c4c28-801d-421e-b15f-02a17e300753"). InnerVolumeSpecName "run-systemd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.143011 4730 scope.go:117] "RemoveContainer" containerID="380a34335df704ec5f2fb810525f314739ef1efe35a54dd9e23f95b66389a669" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.157038 4730 scope.go:117] "RemoveContainer" containerID="59538b49ec4037e4bd1ccb035a0263f32db2504037aca2ab484dc9323f80b5d2" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.169339 4730 scope.go:117] "RemoveContainer" containerID="7d99f5927ded358669fa88d47fb6a8d8bad2808bfdd07920056227828e478eda" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.181279 4730 scope.go:117] "RemoveContainer" containerID="6fbe6a18bf72a902960dfc551de099c3956696ca5ac78ff5afc30a93768be323" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.193531 4730 scope.go:117] "RemoveContainer" containerID="6f988662b16857e74e7c88eb9e69279e7b5807aa2831cdd6ed9a7cab0a6b2494" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.205354 4730 scope.go:117] "RemoveContainer" containerID="566f950ac85f82874c332e01386898784e3b3e37ee3c422f8b73f6e732cd4541" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.217531 4730 scope.go:117] "RemoveContainer" containerID="bd7abb6daba8cf38e3d8cc62ab526acb2fe714e07477eb5d1ecf77e145cf60dc" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.228987 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/73638073-8498-4c61-9f0c-be7a6ef866fe-etc-openvswitch\") pod \"ovnkube-node-wd5fh\" (UID: \"73638073-8498-4c61-9f0c-be7a6ef866fe\") " pod="openshift-ovn-kubernetes/ovnkube-node-wd5fh" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.229067 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/73638073-8498-4c61-9f0c-be7a6ef866fe-host-cni-netd\") pod \"ovnkube-node-wd5fh\" (UID: \"73638073-8498-4c61-9f0c-be7a6ef866fe\") " pod="openshift-ovn-kubernetes/ovnkube-node-wd5fh" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.229091 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/73638073-8498-4c61-9f0c-be7a6ef866fe-run-systemd\") pod \"ovnkube-node-wd5fh\" (UID: \"73638073-8498-4c61-9f0c-be7a6ef866fe\") " pod="openshift-ovn-kubernetes/ovnkube-node-wd5fh" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.229143 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: 
\"kubernetes.io/host-path/73638073-8498-4c61-9f0c-be7a6ef866fe-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-wd5fh\" (UID: \"73638073-8498-4c61-9f0c-be7a6ef866fe\") " pod="openshift-ovn-kubernetes/ovnkube-node-wd5fh" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.229184 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/73638073-8498-4c61-9f0c-be7a6ef866fe-log-socket\") pod \"ovnkube-node-wd5fh\" (UID: \"73638073-8498-4c61-9f0c-be7a6ef866fe\") " pod="openshift-ovn-kubernetes/ovnkube-node-wd5fh" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.229238 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/73638073-8498-4c61-9f0c-be7a6ef866fe-host-run-netns\") pod \"ovnkube-node-wd5fh\" (UID: \"73638073-8498-4c61-9f0c-be7a6ef866fe\") " pod="openshift-ovn-kubernetes/ovnkube-node-wd5fh" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.229274 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/73638073-8498-4c61-9f0c-be7a6ef866fe-run-ovn\") pod \"ovnkube-node-wd5fh\" (UID: \"73638073-8498-4c61-9f0c-be7a6ef866fe\") " pod="openshift-ovn-kubernetes/ovnkube-node-wd5fh" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.229327 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/73638073-8498-4c61-9f0c-be7a6ef866fe-host-cni-bin\") pod \"ovnkube-node-wd5fh\" (UID: \"73638073-8498-4c61-9f0c-be7a6ef866fe\") " pod="openshift-ovn-kubernetes/ovnkube-node-wd5fh" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.229350 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/73638073-8498-4c61-9f0c-be7a6ef866fe-var-lib-openvswitch\") pod \"ovnkube-node-wd5fh\" (UID: \"73638073-8498-4c61-9f0c-be7a6ef866fe\") " pod="openshift-ovn-kubernetes/ovnkube-node-wd5fh" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.229215 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/73638073-8498-4c61-9f0c-be7a6ef866fe-etc-openvswitch\") pod \"ovnkube-node-wd5fh\" (UID: \"73638073-8498-4c61-9f0c-be7a6ef866fe\") " pod="openshift-ovn-kubernetes/ovnkube-node-wd5fh" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.229402 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/73638073-8498-4c61-9f0c-be7a6ef866fe-ovnkube-script-lib\") pod \"ovnkube-node-wd5fh\" (UID: \"73638073-8498-4c61-9f0c-be7a6ef866fe\") " pod="openshift-ovn-kubernetes/ovnkube-node-wd5fh" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.229437 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/73638073-8498-4c61-9f0c-be7a6ef866fe-host-kubelet\") pod \"ovnkube-node-wd5fh\" (UID: \"73638073-8498-4c61-9f0c-be7a6ef866fe\") " pod="openshift-ovn-kubernetes/ovnkube-node-wd5fh" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.229497 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/73638073-8498-4c61-9f0c-be7a6ef866fe-ovnkube-config\") pod 
\"ovnkube-node-wd5fh\" (UID: \"73638073-8498-4c61-9f0c-be7a6ef866fe\") " pod="openshift-ovn-kubernetes/ovnkube-node-wd5fh" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.229521 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6h57t\" (UniqueName: \"kubernetes.io/projected/73638073-8498-4c61-9f0c-be7a6ef866fe-kube-api-access-6h57t\") pod \"ovnkube-node-wd5fh\" (UID: \"73638073-8498-4c61-9f0c-be7a6ef866fe\") " pod="openshift-ovn-kubernetes/ovnkube-node-wd5fh" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.229573 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/73638073-8498-4c61-9f0c-be7a6ef866fe-host-slash\") pod \"ovnkube-node-wd5fh\" (UID: \"73638073-8498-4c61-9f0c-be7a6ef866fe\") " pod="openshift-ovn-kubernetes/ovnkube-node-wd5fh" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.229596 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/73638073-8498-4c61-9f0c-be7a6ef866fe-env-overrides\") pod \"ovnkube-node-wd5fh\" (UID: \"73638073-8498-4c61-9f0c-be7a6ef866fe\") " pod="openshift-ovn-kubernetes/ovnkube-node-wd5fh" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.229669 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/73638073-8498-4c61-9f0c-be7a6ef866fe-systemd-units\") pod \"ovnkube-node-wd5fh\" (UID: \"73638073-8498-4c61-9f0c-be7a6ef866fe\") " pod="openshift-ovn-kubernetes/ovnkube-node-wd5fh" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.229694 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/73638073-8498-4c61-9f0c-be7a6ef866fe-node-log\") pod \"ovnkube-node-wd5fh\" (UID: \"73638073-8498-4c61-9f0c-be7a6ef866fe\") " pod="openshift-ovn-kubernetes/ovnkube-node-wd5fh" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.229752 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/73638073-8498-4c61-9f0c-be7a6ef866fe-ovn-node-metrics-cert\") pod \"ovnkube-node-wd5fh\" (UID: \"73638073-8498-4c61-9f0c-be7a6ef866fe\") " pod="openshift-ovn-kubernetes/ovnkube-node-wd5fh" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.229867 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/73638073-8498-4c61-9f0c-be7a6ef866fe-host-run-ovn-kubernetes\") pod \"ovnkube-node-wd5fh\" (UID: \"73638073-8498-4c61-9f0c-be7a6ef866fe\") " pod="openshift-ovn-kubernetes/ovnkube-node-wd5fh" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.230025 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/73638073-8498-4c61-9f0c-be7a6ef866fe-host-run-ovn-kubernetes\") pod \"ovnkube-node-wd5fh\" (UID: \"73638073-8498-4c61-9f0c-be7a6ef866fe\") " pod="openshift-ovn-kubernetes/ovnkube-node-wd5fh" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.230056 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/73638073-8498-4c61-9f0c-be7a6ef866fe-host-cni-netd\") pod \"ovnkube-node-wd5fh\" (UID: \"73638073-8498-4c61-9f0c-be7a6ef866fe\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-wd5fh" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.230073 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/73638073-8498-4c61-9f0c-be7a6ef866fe-run-systemd\") pod \"ovnkube-node-wd5fh\" (UID: \"73638073-8498-4c61-9f0c-be7a6ef866fe\") " pod="openshift-ovn-kubernetes/ovnkube-node-wd5fh" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.234667 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/73638073-8498-4c61-9f0c-be7a6ef866fe-node-log\") pod \"ovnkube-node-wd5fh\" (UID: \"73638073-8498-4c61-9f0c-be7a6ef866fe\") " pod="openshift-ovn-kubernetes/ovnkube-node-wd5fh" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.234705 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/73638073-8498-4c61-9f0c-be7a6ef866fe-host-kubelet\") pod \"ovnkube-node-wd5fh\" (UID: \"73638073-8498-4c61-9f0c-be7a6ef866fe\") " pod="openshift-ovn-kubernetes/ovnkube-node-wd5fh" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.234742 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/73638073-8498-4c61-9f0c-be7a6ef866fe-host-run-netns\") pod \"ovnkube-node-wd5fh\" (UID: \"73638073-8498-4c61-9f0c-be7a6ef866fe\") " pod="openshift-ovn-kubernetes/ovnkube-node-wd5fh" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.234773 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/73638073-8498-4c61-9f0c-be7a6ef866fe-ovnkube-script-lib\") pod \"ovnkube-node-wd5fh\" (UID: \"73638073-8498-4c61-9f0c-be7a6ef866fe\") " pod="openshift-ovn-kubernetes/ovnkube-node-wd5fh" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.234795 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/73638073-8498-4c61-9f0c-be7a6ef866fe-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-wd5fh\" (UID: \"73638073-8498-4c61-9f0c-be7a6ef866fe\") " pod="openshift-ovn-kubernetes/ovnkube-node-wd5fh" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.234777 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/73638073-8498-4c61-9f0c-be7a6ef866fe-host-slash\") pod \"ovnkube-node-wd5fh\" (UID: \"73638073-8498-4c61-9f0c-be7a6ef866fe\") " pod="openshift-ovn-kubernetes/ovnkube-node-wd5fh" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.234823 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/73638073-8498-4c61-9f0c-be7a6ef866fe-log-socket\") pod \"ovnkube-node-wd5fh\" (UID: \"73638073-8498-4c61-9f0c-be7a6ef866fe\") " pod="openshift-ovn-kubernetes/ovnkube-node-wd5fh" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.234838 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/73638073-8498-4c61-9f0c-be7a6ef866fe-host-cni-bin\") pod \"ovnkube-node-wd5fh\" (UID: \"73638073-8498-4c61-9f0c-be7a6ef866fe\") " pod="openshift-ovn-kubernetes/ovnkube-node-wd5fh" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.234867 4730 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/73638073-8498-4c61-9f0c-be7a6ef866fe-var-lib-openvswitch\") pod \"ovnkube-node-wd5fh\" (UID: \"73638073-8498-4c61-9f0c-be7a6ef866fe\") " pod="openshift-ovn-kubernetes/ovnkube-node-wd5fh" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.234889 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/73638073-8498-4c61-9f0c-be7a6ef866fe-run-ovn\") pod \"ovnkube-node-wd5fh\" (UID: \"73638073-8498-4c61-9f0c-be7a6ef866fe\") " pod="openshift-ovn-kubernetes/ovnkube-node-wd5fh" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.234916 4730 scope.go:117] "RemoveContainer" containerID="0d525f8baccabf7bc3b8067dbbc59d76851878ccc48a9fbe61c637b6fe57e01d" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.235988 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/73638073-8498-4c61-9f0c-be7a6ef866fe-ovnkube-config\") pod \"ovnkube-node-wd5fh\" (UID: \"73638073-8498-4c61-9f0c-be7a6ef866fe\") " pod="openshift-ovn-kubernetes/ovnkube-node-wd5fh" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.237563 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/73638073-8498-4c61-9f0c-be7a6ef866fe-systemd-units\") pod \"ovnkube-node-wd5fh\" (UID: \"73638073-8498-4c61-9f0c-be7a6ef866fe\") " pod="openshift-ovn-kubernetes/ovnkube-node-wd5fh" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.237747 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/73638073-8498-4c61-9f0c-be7a6ef866fe-run-openvswitch\") pod \"ovnkube-node-wd5fh\" (UID: \"73638073-8498-4c61-9f0c-be7a6ef866fe\") " pod="openshift-ovn-kubernetes/ovnkube-node-wd5fh" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.237761 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/73638073-8498-4c61-9f0c-be7a6ef866fe-run-openvswitch\") pod \"ovnkube-node-wd5fh\" (UID: \"73638073-8498-4c61-9f0c-be7a6ef866fe\") " pod="openshift-ovn-kubernetes/ovnkube-node-wd5fh" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.238209 4730 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/823c4c28-801d-421e-b15f-02a17e300753-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.238238 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jfshb\" (UniqueName: \"kubernetes.io/projected/823c4c28-801d-421e-b15f-02a17e300753-kube-api-access-jfshb\") on node \"crc\" DevicePath \"\"" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.238494 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/73638073-8498-4c61-9f0c-be7a6ef866fe-env-overrides\") pod \"ovnkube-node-wd5fh\" (UID: \"73638073-8498-4c61-9f0c-be7a6ef866fe\") " pod="openshift-ovn-kubernetes/ovnkube-node-wd5fh" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.239726 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/73638073-8498-4c61-9f0c-be7a6ef866fe-ovn-node-metrics-cert\") pod 
\"ovnkube-node-wd5fh\" (UID: \"73638073-8498-4c61-9f0c-be7a6ef866fe\") " pod="openshift-ovn-kubernetes/ovnkube-node-wd5fh" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.240781 4730 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/823c4c28-801d-421e-b15f-02a17e300753-run-systemd\") on node \"crc\" DevicePath \"\"" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.247721 4730 scope.go:117] "RemoveContainer" containerID="e4fcd3f6fd645e486e741ae4ddea7a8669c849d729860c651131da25638a393f" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.252300 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6h57t\" (UniqueName: \"kubernetes.io/projected/73638073-8498-4c61-9f0c-be7a6ef866fe-kube-api-access-6h57t\") pod \"ovnkube-node-wd5fh\" (UID: \"73638073-8498-4c61-9f0c-be7a6ef866fe\") " pod="openshift-ovn-kubernetes/ovnkube-node-wd5fh" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.260118 4730 scope.go:117] "RemoveContainer" containerID="f2cbd154ce1dde6b387c050a30b9930a0c6d6cd1faab1f0a03af36431025789b" Sep 30 09:59:26 crc kubenswrapper[4730]: E0930 09:59:26.260592 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f2cbd154ce1dde6b387c050a30b9930a0c6d6cd1faab1f0a03af36431025789b\": container with ID starting with f2cbd154ce1dde6b387c050a30b9930a0c6d6cd1faab1f0a03af36431025789b not found: ID does not exist" containerID="f2cbd154ce1dde6b387c050a30b9930a0c6d6cd1faab1f0a03af36431025789b" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.260751 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f2cbd154ce1dde6b387c050a30b9930a0c6d6cd1faab1f0a03af36431025789b"} err="failed to get container status \"f2cbd154ce1dde6b387c050a30b9930a0c6d6cd1faab1f0a03af36431025789b\": rpc error: code = NotFound desc = could not find container \"f2cbd154ce1dde6b387c050a30b9930a0c6d6cd1faab1f0a03af36431025789b\": container with ID starting with f2cbd154ce1dde6b387c050a30b9930a0c6d6cd1faab1f0a03af36431025789b not found: ID does not exist" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.260835 4730 scope.go:117] "RemoveContainer" containerID="380a34335df704ec5f2fb810525f314739ef1efe35a54dd9e23f95b66389a669" Sep 30 09:59:26 crc kubenswrapper[4730]: E0930 09:59:26.261302 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"380a34335df704ec5f2fb810525f314739ef1efe35a54dd9e23f95b66389a669\": container with ID starting with 380a34335df704ec5f2fb810525f314739ef1efe35a54dd9e23f95b66389a669 not found: ID does not exist" containerID="380a34335df704ec5f2fb810525f314739ef1efe35a54dd9e23f95b66389a669" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.261357 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"380a34335df704ec5f2fb810525f314739ef1efe35a54dd9e23f95b66389a669"} err="failed to get container status \"380a34335df704ec5f2fb810525f314739ef1efe35a54dd9e23f95b66389a669\": rpc error: code = NotFound desc = could not find container \"380a34335df704ec5f2fb810525f314739ef1efe35a54dd9e23f95b66389a669\": container with ID starting with 380a34335df704ec5f2fb810525f314739ef1efe35a54dd9e23f95b66389a669 not found: ID does not exist" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.261399 4730 scope.go:117] "RemoveContainer" 
containerID="59538b49ec4037e4bd1ccb035a0263f32db2504037aca2ab484dc9323f80b5d2" Sep 30 09:59:26 crc kubenswrapper[4730]: E0930 09:59:26.261795 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"59538b49ec4037e4bd1ccb035a0263f32db2504037aca2ab484dc9323f80b5d2\": container with ID starting with 59538b49ec4037e4bd1ccb035a0263f32db2504037aca2ab484dc9323f80b5d2 not found: ID does not exist" containerID="59538b49ec4037e4bd1ccb035a0263f32db2504037aca2ab484dc9323f80b5d2" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.261878 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"59538b49ec4037e4bd1ccb035a0263f32db2504037aca2ab484dc9323f80b5d2"} err="failed to get container status \"59538b49ec4037e4bd1ccb035a0263f32db2504037aca2ab484dc9323f80b5d2\": rpc error: code = NotFound desc = could not find container \"59538b49ec4037e4bd1ccb035a0263f32db2504037aca2ab484dc9323f80b5d2\": container with ID starting with 59538b49ec4037e4bd1ccb035a0263f32db2504037aca2ab484dc9323f80b5d2 not found: ID does not exist" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.261928 4730 scope.go:117] "RemoveContainer" containerID="7d99f5927ded358669fa88d47fb6a8d8bad2808bfdd07920056227828e478eda" Sep 30 09:59:26 crc kubenswrapper[4730]: E0930 09:59:26.262225 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7d99f5927ded358669fa88d47fb6a8d8bad2808bfdd07920056227828e478eda\": container with ID starting with 7d99f5927ded358669fa88d47fb6a8d8bad2808bfdd07920056227828e478eda not found: ID does not exist" containerID="7d99f5927ded358669fa88d47fb6a8d8bad2808bfdd07920056227828e478eda" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.262257 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7d99f5927ded358669fa88d47fb6a8d8bad2808bfdd07920056227828e478eda"} err="failed to get container status \"7d99f5927ded358669fa88d47fb6a8d8bad2808bfdd07920056227828e478eda\": rpc error: code = NotFound desc = could not find container \"7d99f5927ded358669fa88d47fb6a8d8bad2808bfdd07920056227828e478eda\": container with ID starting with 7d99f5927ded358669fa88d47fb6a8d8bad2808bfdd07920056227828e478eda not found: ID does not exist" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.262302 4730 scope.go:117] "RemoveContainer" containerID="6fbe6a18bf72a902960dfc551de099c3956696ca5ac78ff5afc30a93768be323" Sep 30 09:59:26 crc kubenswrapper[4730]: E0930 09:59:26.262576 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6fbe6a18bf72a902960dfc551de099c3956696ca5ac78ff5afc30a93768be323\": container with ID starting with 6fbe6a18bf72a902960dfc551de099c3956696ca5ac78ff5afc30a93768be323 not found: ID does not exist" containerID="6fbe6a18bf72a902960dfc551de099c3956696ca5ac78ff5afc30a93768be323" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.262601 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6fbe6a18bf72a902960dfc551de099c3956696ca5ac78ff5afc30a93768be323"} err="failed to get container status \"6fbe6a18bf72a902960dfc551de099c3956696ca5ac78ff5afc30a93768be323\": rpc error: code = NotFound desc = could not find container \"6fbe6a18bf72a902960dfc551de099c3956696ca5ac78ff5afc30a93768be323\": container with ID starting with 
6fbe6a18bf72a902960dfc551de099c3956696ca5ac78ff5afc30a93768be323 not found: ID does not exist" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.262635 4730 scope.go:117] "RemoveContainer" containerID="6f988662b16857e74e7c88eb9e69279e7b5807aa2831cdd6ed9a7cab0a6b2494" Sep 30 09:59:26 crc kubenswrapper[4730]: E0930 09:59:26.262951 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6f988662b16857e74e7c88eb9e69279e7b5807aa2831cdd6ed9a7cab0a6b2494\": container with ID starting with 6f988662b16857e74e7c88eb9e69279e7b5807aa2831cdd6ed9a7cab0a6b2494 not found: ID does not exist" containerID="6f988662b16857e74e7c88eb9e69279e7b5807aa2831cdd6ed9a7cab0a6b2494" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.263037 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6f988662b16857e74e7c88eb9e69279e7b5807aa2831cdd6ed9a7cab0a6b2494"} err="failed to get container status \"6f988662b16857e74e7c88eb9e69279e7b5807aa2831cdd6ed9a7cab0a6b2494\": rpc error: code = NotFound desc = could not find container \"6f988662b16857e74e7c88eb9e69279e7b5807aa2831cdd6ed9a7cab0a6b2494\": container with ID starting with 6f988662b16857e74e7c88eb9e69279e7b5807aa2831cdd6ed9a7cab0a6b2494 not found: ID does not exist" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.263107 4730 scope.go:117] "RemoveContainer" containerID="566f950ac85f82874c332e01386898784e3b3e37ee3c422f8b73f6e732cd4541" Sep 30 09:59:26 crc kubenswrapper[4730]: E0930 09:59:26.263426 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"566f950ac85f82874c332e01386898784e3b3e37ee3c422f8b73f6e732cd4541\": container with ID starting with 566f950ac85f82874c332e01386898784e3b3e37ee3c422f8b73f6e732cd4541 not found: ID does not exist" containerID="566f950ac85f82874c332e01386898784e3b3e37ee3c422f8b73f6e732cd4541" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.263456 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"566f950ac85f82874c332e01386898784e3b3e37ee3c422f8b73f6e732cd4541"} err="failed to get container status \"566f950ac85f82874c332e01386898784e3b3e37ee3c422f8b73f6e732cd4541\": rpc error: code = NotFound desc = could not find container \"566f950ac85f82874c332e01386898784e3b3e37ee3c422f8b73f6e732cd4541\": container with ID starting with 566f950ac85f82874c332e01386898784e3b3e37ee3c422f8b73f6e732cd4541 not found: ID does not exist" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.263474 4730 scope.go:117] "RemoveContainer" containerID="bd7abb6daba8cf38e3d8cc62ab526acb2fe714e07477eb5d1ecf77e145cf60dc" Sep 30 09:59:26 crc kubenswrapper[4730]: E0930 09:59:26.263745 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bd7abb6daba8cf38e3d8cc62ab526acb2fe714e07477eb5d1ecf77e145cf60dc\": container with ID starting with bd7abb6daba8cf38e3d8cc62ab526acb2fe714e07477eb5d1ecf77e145cf60dc not found: ID does not exist" containerID="bd7abb6daba8cf38e3d8cc62ab526acb2fe714e07477eb5d1ecf77e145cf60dc" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.263771 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bd7abb6daba8cf38e3d8cc62ab526acb2fe714e07477eb5d1ecf77e145cf60dc"} err="failed to get container status \"bd7abb6daba8cf38e3d8cc62ab526acb2fe714e07477eb5d1ecf77e145cf60dc\": rpc 
error: code = NotFound desc = could not find container \"bd7abb6daba8cf38e3d8cc62ab526acb2fe714e07477eb5d1ecf77e145cf60dc\": container with ID starting with bd7abb6daba8cf38e3d8cc62ab526acb2fe714e07477eb5d1ecf77e145cf60dc not found: ID does not exist" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.263786 4730 scope.go:117] "RemoveContainer" containerID="0d525f8baccabf7bc3b8067dbbc59d76851878ccc48a9fbe61c637b6fe57e01d" Sep 30 09:59:26 crc kubenswrapper[4730]: E0930 09:59:26.264010 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0d525f8baccabf7bc3b8067dbbc59d76851878ccc48a9fbe61c637b6fe57e01d\": container with ID starting with 0d525f8baccabf7bc3b8067dbbc59d76851878ccc48a9fbe61c637b6fe57e01d not found: ID does not exist" containerID="0d525f8baccabf7bc3b8067dbbc59d76851878ccc48a9fbe61c637b6fe57e01d" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.264029 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0d525f8baccabf7bc3b8067dbbc59d76851878ccc48a9fbe61c637b6fe57e01d"} err="failed to get container status \"0d525f8baccabf7bc3b8067dbbc59d76851878ccc48a9fbe61c637b6fe57e01d\": rpc error: code = NotFound desc = could not find container \"0d525f8baccabf7bc3b8067dbbc59d76851878ccc48a9fbe61c637b6fe57e01d\": container with ID starting with 0d525f8baccabf7bc3b8067dbbc59d76851878ccc48a9fbe61c637b6fe57e01d not found: ID does not exist" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.264045 4730 scope.go:117] "RemoveContainer" containerID="e4fcd3f6fd645e486e741ae4ddea7a8669c849d729860c651131da25638a393f" Sep 30 09:59:26 crc kubenswrapper[4730]: E0930 09:59:26.264307 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e4fcd3f6fd645e486e741ae4ddea7a8669c849d729860c651131da25638a393f\": container with ID starting with e4fcd3f6fd645e486e741ae4ddea7a8669c849d729860c651131da25638a393f not found: ID does not exist" containerID="e4fcd3f6fd645e486e741ae4ddea7a8669c849d729860c651131da25638a393f" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.264344 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e4fcd3f6fd645e486e741ae4ddea7a8669c849d729860c651131da25638a393f"} err="failed to get container status \"e4fcd3f6fd645e486e741ae4ddea7a8669c849d729860c651131da25638a393f\": rpc error: code = NotFound desc = could not find container \"e4fcd3f6fd645e486e741ae4ddea7a8669c849d729860c651131da25638a393f\": container with ID starting with e4fcd3f6fd645e486e741ae4ddea7a8669c849d729860c651131da25638a393f not found: ID does not exist" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.264366 4730 scope.go:117] "RemoveContainer" containerID="f2cbd154ce1dde6b387c050a30b9930a0c6d6cd1faab1f0a03af36431025789b" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.264692 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f2cbd154ce1dde6b387c050a30b9930a0c6d6cd1faab1f0a03af36431025789b"} err="failed to get container status \"f2cbd154ce1dde6b387c050a30b9930a0c6d6cd1faab1f0a03af36431025789b\": rpc error: code = NotFound desc = could not find container \"f2cbd154ce1dde6b387c050a30b9930a0c6d6cd1faab1f0a03af36431025789b\": container with ID starting with f2cbd154ce1dde6b387c050a30b9930a0c6d6cd1faab1f0a03af36431025789b not found: ID does not exist" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 
09:59:26.264713 4730 scope.go:117] "RemoveContainer" containerID="380a34335df704ec5f2fb810525f314739ef1efe35a54dd9e23f95b66389a669" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.264953 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"380a34335df704ec5f2fb810525f314739ef1efe35a54dd9e23f95b66389a669"} err="failed to get container status \"380a34335df704ec5f2fb810525f314739ef1efe35a54dd9e23f95b66389a669\": rpc error: code = NotFound desc = could not find container \"380a34335df704ec5f2fb810525f314739ef1efe35a54dd9e23f95b66389a669\": container with ID starting with 380a34335df704ec5f2fb810525f314739ef1efe35a54dd9e23f95b66389a669 not found: ID does not exist" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.264973 4730 scope.go:117] "RemoveContainer" containerID="59538b49ec4037e4bd1ccb035a0263f32db2504037aca2ab484dc9323f80b5d2" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.265194 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"59538b49ec4037e4bd1ccb035a0263f32db2504037aca2ab484dc9323f80b5d2"} err="failed to get container status \"59538b49ec4037e4bd1ccb035a0263f32db2504037aca2ab484dc9323f80b5d2\": rpc error: code = NotFound desc = could not find container \"59538b49ec4037e4bd1ccb035a0263f32db2504037aca2ab484dc9323f80b5d2\": container with ID starting with 59538b49ec4037e4bd1ccb035a0263f32db2504037aca2ab484dc9323f80b5d2 not found: ID does not exist" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.265211 4730 scope.go:117] "RemoveContainer" containerID="7d99f5927ded358669fa88d47fb6a8d8bad2808bfdd07920056227828e478eda" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.265432 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7d99f5927ded358669fa88d47fb6a8d8bad2808bfdd07920056227828e478eda"} err="failed to get container status \"7d99f5927ded358669fa88d47fb6a8d8bad2808bfdd07920056227828e478eda\": rpc error: code = NotFound desc = could not find container \"7d99f5927ded358669fa88d47fb6a8d8bad2808bfdd07920056227828e478eda\": container with ID starting with 7d99f5927ded358669fa88d47fb6a8d8bad2808bfdd07920056227828e478eda not found: ID does not exist" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.265503 4730 scope.go:117] "RemoveContainer" containerID="6fbe6a18bf72a902960dfc551de099c3956696ca5ac78ff5afc30a93768be323" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.265816 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6fbe6a18bf72a902960dfc551de099c3956696ca5ac78ff5afc30a93768be323"} err="failed to get container status \"6fbe6a18bf72a902960dfc551de099c3956696ca5ac78ff5afc30a93768be323\": rpc error: code = NotFound desc = could not find container \"6fbe6a18bf72a902960dfc551de099c3956696ca5ac78ff5afc30a93768be323\": container with ID starting with 6fbe6a18bf72a902960dfc551de099c3956696ca5ac78ff5afc30a93768be323 not found: ID does not exist" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.265845 4730 scope.go:117] "RemoveContainer" containerID="6f988662b16857e74e7c88eb9e69279e7b5807aa2831cdd6ed9a7cab0a6b2494" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.266081 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6f988662b16857e74e7c88eb9e69279e7b5807aa2831cdd6ed9a7cab0a6b2494"} err="failed to get container status 
\"6f988662b16857e74e7c88eb9e69279e7b5807aa2831cdd6ed9a7cab0a6b2494\": rpc error: code = NotFound desc = could not find container \"6f988662b16857e74e7c88eb9e69279e7b5807aa2831cdd6ed9a7cab0a6b2494\": container with ID starting with 6f988662b16857e74e7c88eb9e69279e7b5807aa2831cdd6ed9a7cab0a6b2494 not found: ID does not exist" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.266105 4730 scope.go:117] "RemoveContainer" containerID="566f950ac85f82874c332e01386898784e3b3e37ee3c422f8b73f6e732cd4541" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.266329 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"566f950ac85f82874c332e01386898784e3b3e37ee3c422f8b73f6e732cd4541"} err="failed to get container status \"566f950ac85f82874c332e01386898784e3b3e37ee3c422f8b73f6e732cd4541\": rpc error: code = NotFound desc = could not find container \"566f950ac85f82874c332e01386898784e3b3e37ee3c422f8b73f6e732cd4541\": container with ID starting with 566f950ac85f82874c332e01386898784e3b3e37ee3c422f8b73f6e732cd4541 not found: ID does not exist" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.266354 4730 scope.go:117] "RemoveContainer" containerID="bd7abb6daba8cf38e3d8cc62ab526acb2fe714e07477eb5d1ecf77e145cf60dc" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.266582 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bd7abb6daba8cf38e3d8cc62ab526acb2fe714e07477eb5d1ecf77e145cf60dc"} err="failed to get container status \"bd7abb6daba8cf38e3d8cc62ab526acb2fe714e07477eb5d1ecf77e145cf60dc\": rpc error: code = NotFound desc = could not find container \"bd7abb6daba8cf38e3d8cc62ab526acb2fe714e07477eb5d1ecf77e145cf60dc\": container with ID starting with bd7abb6daba8cf38e3d8cc62ab526acb2fe714e07477eb5d1ecf77e145cf60dc not found: ID does not exist" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.266600 4730 scope.go:117] "RemoveContainer" containerID="0d525f8baccabf7bc3b8067dbbc59d76851878ccc48a9fbe61c637b6fe57e01d" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.266856 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0d525f8baccabf7bc3b8067dbbc59d76851878ccc48a9fbe61c637b6fe57e01d"} err="failed to get container status \"0d525f8baccabf7bc3b8067dbbc59d76851878ccc48a9fbe61c637b6fe57e01d\": rpc error: code = NotFound desc = could not find container \"0d525f8baccabf7bc3b8067dbbc59d76851878ccc48a9fbe61c637b6fe57e01d\": container with ID starting with 0d525f8baccabf7bc3b8067dbbc59d76851878ccc48a9fbe61c637b6fe57e01d not found: ID does not exist" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.266873 4730 scope.go:117] "RemoveContainer" containerID="e4fcd3f6fd645e486e741ae4ddea7a8669c849d729860c651131da25638a393f" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.267084 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e4fcd3f6fd645e486e741ae4ddea7a8669c849d729860c651131da25638a393f"} err="failed to get container status \"e4fcd3f6fd645e486e741ae4ddea7a8669c849d729860c651131da25638a393f\": rpc error: code = NotFound desc = could not find container \"e4fcd3f6fd645e486e741ae4ddea7a8669c849d729860c651131da25638a393f\": container with ID starting with e4fcd3f6fd645e486e741ae4ddea7a8669c849d729860c651131da25638a393f not found: ID does not exist" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.267159 4730 scope.go:117] "RemoveContainer" 
containerID="f2cbd154ce1dde6b387c050a30b9930a0c6d6cd1faab1f0a03af36431025789b" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.267444 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f2cbd154ce1dde6b387c050a30b9930a0c6d6cd1faab1f0a03af36431025789b"} err="failed to get container status \"f2cbd154ce1dde6b387c050a30b9930a0c6d6cd1faab1f0a03af36431025789b\": rpc error: code = NotFound desc = could not find container \"f2cbd154ce1dde6b387c050a30b9930a0c6d6cd1faab1f0a03af36431025789b\": container with ID starting with f2cbd154ce1dde6b387c050a30b9930a0c6d6cd1faab1f0a03af36431025789b not found: ID does not exist" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.267466 4730 scope.go:117] "RemoveContainer" containerID="380a34335df704ec5f2fb810525f314739ef1efe35a54dd9e23f95b66389a669" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.267721 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"380a34335df704ec5f2fb810525f314739ef1efe35a54dd9e23f95b66389a669"} err="failed to get container status \"380a34335df704ec5f2fb810525f314739ef1efe35a54dd9e23f95b66389a669\": rpc error: code = NotFound desc = could not find container \"380a34335df704ec5f2fb810525f314739ef1efe35a54dd9e23f95b66389a669\": container with ID starting with 380a34335df704ec5f2fb810525f314739ef1efe35a54dd9e23f95b66389a669 not found: ID does not exist" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.267745 4730 scope.go:117] "RemoveContainer" containerID="59538b49ec4037e4bd1ccb035a0263f32db2504037aca2ab484dc9323f80b5d2" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.267953 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"59538b49ec4037e4bd1ccb035a0263f32db2504037aca2ab484dc9323f80b5d2"} err="failed to get container status \"59538b49ec4037e4bd1ccb035a0263f32db2504037aca2ab484dc9323f80b5d2\": rpc error: code = NotFound desc = could not find container \"59538b49ec4037e4bd1ccb035a0263f32db2504037aca2ab484dc9323f80b5d2\": container with ID starting with 59538b49ec4037e4bd1ccb035a0263f32db2504037aca2ab484dc9323f80b5d2 not found: ID does not exist" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.267973 4730 scope.go:117] "RemoveContainer" containerID="7d99f5927ded358669fa88d47fb6a8d8bad2808bfdd07920056227828e478eda" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.268239 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7d99f5927ded358669fa88d47fb6a8d8bad2808bfdd07920056227828e478eda"} err="failed to get container status \"7d99f5927ded358669fa88d47fb6a8d8bad2808bfdd07920056227828e478eda\": rpc error: code = NotFound desc = could not find container \"7d99f5927ded358669fa88d47fb6a8d8bad2808bfdd07920056227828e478eda\": container with ID starting with 7d99f5927ded358669fa88d47fb6a8d8bad2808bfdd07920056227828e478eda not found: ID does not exist" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.268277 4730 scope.go:117] "RemoveContainer" containerID="6fbe6a18bf72a902960dfc551de099c3956696ca5ac78ff5afc30a93768be323" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.268670 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6fbe6a18bf72a902960dfc551de099c3956696ca5ac78ff5afc30a93768be323"} err="failed to get container status \"6fbe6a18bf72a902960dfc551de099c3956696ca5ac78ff5afc30a93768be323\": rpc error: code = NotFound desc = could not find 
container \"6fbe6a18bf72a902960dfc551de099c3956696ca5ac78ff5afc30a93768be323\": container with ID starting with 6fbe6a18bf72a902960dfc551de099c3956696ca5ac78ff5afc30a93768be323 not found: ID does not exist" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.268706 4730 scope.go:117] "RemoveContainer" containerID="6f988662b16857e74e7c88eb9e69279e7b5807aa2831cdd6ed9a7cab0a6b2494" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.268979 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6f988662b16857e74e7c88eb9e69279e7b5807aa2831cdd6ed9a7cab0a6b2494"} err="failed to get container status \"6f988662b16857e74e7c88eb9e69279e7b5807aa2831cdd6ed9a7cab0a6b2494\": rpc error: code = NotFound desc = could not find container \"6f988662b16857e74e7c88eb9e69279e7b5807aa2831cdd6ed9a7cab0a6b2494\": container with ID starting with 6f988662b16857e74e7c88eb9e69279e7b5807aa2831cdd6ed9a7cab0a6b2494 not found: ID does not exist" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.269006 4730 scope.go:117] "RemoveContainer" containerID="566f950ac85f82874c332e01386898784e3b3e37ee3c422f8b73f6e732cd4541" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.269267 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"566f950ac85f82874c332e01386898784e3b3e37ee3c422f8b73f6e732cd4541"} err="failed to get container status \"566f950ac85f82874c332e01386898784e3b3e37ee3c422f8b73f6e732cd4541\": rpc error: code = NotFound desc = could not find container \"566f950ac85f82874c332e01386898784e3b3e37ee3c422f8b73f6e732cd4541\": container with ID starting with 566f950ac85f82874c332e01386898784e3b3e37ee3c422f8b73f6e732cd4541 not found: ID does not exist" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.269343 4730 scope.go:117] "RemoveContainer" containerID="bd7abb6daba8cf38e3d8cc62ab526acb2fe714e07477eb5d1ecf77e145cf60dc" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.269664 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bd7abb6daba8cf38e3d8cc62ab526acb2fe714e07477eb5d1ecf77e145cf60dc"} err="failed to get container status \"bd7abb6daba8cf38e3d8cc62ab526acb2fe714e07477eb5d1ecf77e145cf60dc\": rpc error: code = NotFound desc = could not find container \"bd7abb6daba8cf38e3d8cc62ab526acb2fe714e07477eb5d1ecf77e145cf60dc\": container with ID starting with bd7abb6daba8cf38e3d8cc62ab526acb2fe714e07477eb5d1ecf77e145cf60dc not found: ID does not exist" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.269697 4730 scope.go:117] "RemoveContainer" containerID="0d525f8baccabf7bc3b8067dbbc59d76851878ccc48a9fbe61c637b6fe57e01d" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.270063 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0d525f8baccabf7bc3b8067dbbc59d76851878ccc48a9fbe61c637b6fe57e01d"} err="failed to get container status \"0d525f8baccabf7bc3b8067dbbc59d76851878ccc48a9fbe61c637b6fe57e01d\": rpc error: code = NotFound desc = could not find container \"0d525f8baccabf7bc3b8067dbbc59d76851878ccc48a9fbe61c637b6fe57e01d\": container with ID starting with 0d525f8baccabf7bc3b8067dbbc59d76851878ccc48a9fbe61c637b6fe57e01d not found: ID does not exist" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.270143 4730 scope.go:117] "RemoveContainer" containerID="e4fcd3f6fd645e486e741ae4ddea7a8669c849d729860c651131da25638a393f" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.270434 4730 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e4fcd3f6fd645e486e741ae4ddea7a8669c849d729860c651131da25638a393f"} err="failed to get container status \"e4fcd3f6fd645e486e741ae4ddea7a8669c849d729860c651131da25638a393f\": rpc error: code = NotFound desc = could not find container \"e4fcd3f6fd645e486e741ae4ddea7a8669c849d729860c651131da25638a393f\": container with ID starting with e4fcd3f6fd645e486e741ae4ddea7a8669c849d729860c651131da25638a393f not found: ID does not exist" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.270466 4730 scope.go:117] "RemoveContainer" containerID="f2cbd154ce1dde6b387c050a30b9930a0c6d6cd1faab1f0a03af36431025789b" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.270742 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f2cbd154ce1dde6b387c050a30b9930a0c6d6cd1faab1f0a03af36431025789b"} err="failed to get container status \"f2cbd154ce1dde6b387c050a30b9930a0c6d6cd1faab1f0a03af36431025789b\": rpc error: code = NotFound desc = could not find container \"f2cbd154ce1dde6b387c050a30b9930a0c6d6cd1faab1f0a03af36431025789b\": container with ID starting with f2cbd154ce1dde6b387c050a30b9930a0c6d6cd1faab1f0a03af36431025789b not found: ID does not exist" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.270764 4730 scope.go:117] "RemoveContainer" containerID="380a34335df704ec5f2fb810525f314739ef1efe35a54dd9e23f95b66389a669" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.270990 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"380a34335df704ec5f2fb810525f314739ef1efe35a54dd9e23f95b66389a669"} err="failed to get container status \"380a34335df704ec5f2fb810525f314739ef1efe35a54dd9e23f95b66389a669\": rpc error: code = NotFound desc = could not find container \"380a34335df704ec5f2fb810525f314739ef1efe35a54dd9e23f95b66389a669\": container with ID starting with 380a34335df704ec5f2fb810525f314739ef1efe35a54dd9e23f95b66389a669 not found: ID does not exist" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.271070 4730 scope.go:117] "RemoveContainer" containerID="59538b49ec4037e4bd1ccb035a0263f32db2504037aca2ab484dc9323f80b5d2" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.271404 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"59538b49ec4037e4bd1ccb035a0263f32db2504037aca2ab484dc9323f80b5d2"} err="failed to get container status \"59538b49ec4037e4bd1ccb035a0263f32db2504037aca2ab484dc9323f80b5d2\": rpc error: code = NotFound desc = could not find container \"59538b49ec4037e4bd1ccb035a0263f32db2504037aca2ab484dc9323f80b5d2\": container with ID starting with 59538b49ec4037e4bd1ccb035a0263f32db2504037aca2ab484dc9323f80b5d2 not found: ID does not exist" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.271431 4730 scope.go:117] "RemoveContainer" containerID="7d99f5927ded358669fa88d47fb6a8d8bad2808bfdd07920056227828e478eda" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.271701 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7d99f5927ded358669fa88d47fb6a8d8bad2808bfdd07920056227828e478eda"} err="failed to get container status \"7d99f5927ded358669fa88d47fb6a8d8bad2808bfdd07920056227828e478eda\": rpc error: code = NotFound desc = could not find container \"7d99f5927ded358669fa88d47fb6a8d8bad2808bfdd07920056227828e478eda\": container with ID starting with 
7d99f5927ded358669fa88d47fb6a8d8bad2808bfdd07920056227828e478eda not found: ID does not exist" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.271816 4730 scope.go:117] "RemoveContainer" containerID="6fbe6a18bf72a902960dfc551de099c3956696ca5ac78ff5afc30a93768be323" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.272221 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6fbe6a18bf72a902960dfc551de099c3956696ca5ac78ff5afc30a93768be323"} err="failed to get container status \"6fbe6a18bf72a902960dfc551de099c3956696ca5ac78ff5afc30a93768be323\": rpc error: code = NotFound desc = could not find container \"6fbe6a18bf72a902960dfc551de099c3956696ca5ac78ff5afc30a93768be323\": container with ID starting with 6fbe6a18bf72a902960dfc551de099c3956696ca5ac78ff5afc30a93768be323 not found: ID does not exist" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.272250 4730 scope.go:117] "RemoveContainer" containerID="6f988662b16857e74e7c88eb9e69279e7b5807aa2831cdd6ed9a7cab0a6b2494" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.272543 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6f988662b16857e74e7c88eb9e69279e7b5807aa2831cdd6ed9a7cab0a6b2494"} err="failed to get container status \"6f988662b16857e74e7c88eb9e69279e7b5807aa2831cdd6ed9a7cab0a6b2494\": rpc error: code = NotFound desc = could not find container \"6f988662b16857e74e7c88eb9e69279e7b5807aa2831cdd6ed9a7cab0a6b2494\": container with ID starting with 6f988662b16857e74e7c88eb9e69279e7b5807aa2831cdd6ed9a7cab0a6b2494 not found: ID does not exist" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.272569 4730 scope.go:117] "RemoveContainer" containerID="566f950ac85f82874c332e01386898784e3b3e37ee3c422f8b73f6e732cd4541" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.273025 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"566f950ac85f82874c332e01386898784e3b3e37ee3c422f8b73f6e732cd4541"} err="failed to get container status \"566f950ac85f82874c332e01386898784e3b3e37ee3c422f8b73f6e732cd4541\": rpc error: code = NotFound desc = could not find container \"566f950ac85f82874c332e01386898784e3b3e37ee3c422f8b73f6e732cd4541\": container with ID starting with 566f950ac85f82874c332e01386898784e3b3e37ee3c422f8b73f6e732cd4541 not found: ID does not exist" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.273046 4730 scope.go:117] "RemoveContainer" containerID="bd7abb6daba8cf38e3d8cc62ab526acb2fe714e07477eb5d1ecf77e145cf60dc" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.273453 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bd7abb6daba8cf38e3d8cc62ab526acb2fe714e07477eb5d1ecf77e145cf60dc"} err="failed to get container status \"bd7abb6daba8cf38e3d8cc62ab526acb2fe714e07477eb5d1ecf77e145cf60dc\": rpc error: code = NotFound desc = could not find container \"bd7abb6daba8cf38e3d8cc62ab526acb2fe714e07477eb5d1ecf77e145cf60dc\": container with ID starting with bd7abb6daba8cf38e3d8cc62ab526acb2fe714e07477eb5d1ecf77e145cf60dc not found: ID does not exist" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.273487 4730 scope.go:117] "RemoveContainer" containerID="0d525f8baccabf7bc3b8067dbbc59d76851878ccc48a9fbe61c637b6fe57e01d" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.273869 4730 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"0d525f8baccabf7bc3b8067dbbc59d76851878ccc48a9fbe61c637b6fe57e01d"} err="failed to get container status \"0d525f8baccabf7bc3b8067dbbc59d76851878ccc48a9fbe61c637b6fe57e01d\": rpc error: code = NotFound desc = could not find container \"0d525f8baccabf7bc3b8067dbbc59d76851878ccc48a9fbe61c637b6fe57e01d\": container with ID starting with 0d525f8baccabf7bc3b8067dbbc59d76851878ccc48a9fbe61c637b6fe57e01d not found: ID does not exist" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.273956 4730 scope.go:117] "RemoveContainer" containerID="e4fcd3f6fd645e486e741ae4ddea7a8669c849d729860c651131da25638a393f" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.274253 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e4fcd3f6fd645e486e741ae4ddea7a8669c849d729860c651131da25638a393f"} err="failed to get container status \"e4fcd3f6fd645e486e741ae4ddea7a8669c849d729860c651131da25638a393f\": rpc error: code = NotFound desc = could not find container \"e4fcd3f6fd645e486e741ae4ddea7a8669c849d729860c651131da25638a393f\": container with ID starting with e4fcd3f6fd645e486e741ae4ddea7a8669c849d729860c651131da25638a393f not found: ID does not exist" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.324738 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-wd5fh" Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.448378 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-c5vmh"] Sep 30 09:59:26 crc kubenswrapper[4730]: I0930 09:59:26.454134 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-c5vmh"] Sep 30 09:59:27 crc kubenswrapper[4730]: I0930 09:59:27.112798 4730 generic.go:334] "Generic (PLEG): container finished" podID="73638073-8498-4c61-9f0c-be7a6ef866fe" containerID="32e87854b3235ac772e573f2f8125c72d5f1db1475377dbcf4721030d5c3f9eb" exitCode=0 Sep 30 09:59:27 crc kubenswrapper[4730]: I0930 09:59:27.112883 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wd5fh" event={"ID":"73638073-8498-4c61-9f0c-be7a6ef866fe","Type":"ContainerDied","Data":"32e87854b3235ac772e573f2f8125c72d5f1db1475377dbcf4721030d5c3f9eb"} Sep 30 09:59:27 crc kubenswrapper[4730]: I0930 09:59:27.112919 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wd5fh" event={"ID":"73638073-8498-4c61-9f0c-be7a6ef866fe","Type":"ContainerStarted","Data":"f9df405e889705912c8b887b8b0c47dfa161c5e4a8841479ac36847430e5a3f8"} Sep 30 09:59:27 crc kubenswrapper[4730]: I0930 09:59:27.115900 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-t2frc_98a6f8df-1ac8-4652-8074-90cb180311ad/kube-multus/2.log" Sep 30 09:59:28 crc kubenswrapper[4730]: I0930 09:59:28.127400 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wd5fh" event={"ID":"73638073-8498-4c61-9f0c-be7a6ef866fe","Type":"ContainerStarted","Data":"564a6a672553e5e33e109b0826f900aeccb5af84a05b9f9d258da0dbb7226ea4"} Sep 30 09:59:28 crc kubenswrapper[4730]: I0930 09:59:28.127901 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wd5fh" event={"ID":"73638073-8498-4c61-9f0c-be7a6ef866fe","Type":"ContainerStarted","Data":"110f3720746e3e657a49ebad7978232a1d505a20dcb71ae5e0ead979ee55dbbf"} Sep 30 09:59:28 crc kubenswrapper[4730]: I0930 
09:59:28.127914 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wd5fh" event={"ID":"73638073-8498-4c61-9f0c-be7a6ef866fe","Type":"ContainerStarted","Data":"00b8c996aadc5b0ce09af7767598f96b5700d67225a898325e49c23e55170ec8"} Sep 30 09:59:28 crc kubenswrapper[4730]: I0930 09:59:28.127924 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wd5fh" event={"ID":"73638073-8498-4c61-9f0c-be7a6ef866fe","Type":"ContainerStarted","Data":"48a14b723f109b5a417a52d76504ee9e024e840b7fc714b9200483558f39e1c3"} Sep 30 09:59:28 crc kubenswrapper[4730]: I0930 09:59:28.127932 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wd5fh" event={"ID":"73638073-8498-4c61-9f0c-be7a6ef866fe","Type":"ContainerStarted","Data":"bd8f50efecb414f5ecd0ec5ed1273a67e0cf0374b8e309888421c843274ebdee"} Sep 30 09:59:28 crc kubenswrapper[4730]: I0930 09:59:28.127942 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wd5fh" event={"ID":"73638073-8498-4c61-9f0c-be7a6ef866fe","Type":"ContainerStarted","Data":"c97b16ca1dec6aac2dd7a2592b36b61215b6c32a817ebc4b267065c8ba2dde01"} Sep 30 09:59:28 crc kubenswrapper[4730]: I0930 09:59:28.387975 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="823c4c28-801d-421e-b15f-02a17e300753" path="/var/lib/kubelet/pods/823c4c28-801d-421e-b15f-02a17e300753/volumes" Sep 30 09:59:30 crc kubenswrapper[4730]: I0930 09:59:30.141758 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wd5fh" event={"ID":"73638073-8498-4c61-9f0c-be7a6ef866fe","Type":"ContainerStarted","Data":"e1e5188d8c2021c0ff16dbacd27bb55cd48b671f6a3a7829d04127ed7b428e5c"} Sep 30 09:59:33 crc kubenswrapper[4730]: I0930 09:59:33.163354 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wd5fh" event={"ID":"73638073-8498-4c61-9f0c-be7a6ef866fe","Type":"ContainerStarted","Data":"00e80888cb9d89b54e409b697bd1677c4be02e815c9ac630a14ff69799710500"} Sep 30 09:59:33 crc kubenswrapper[4730]: I0930 09:59:33.163977 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-wd5fh" Sep 30 09:59:33 crc kubenswrapper[4730]: I0930 09:59:33.164058 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-wd5fh" Sep 30 09:59:33 crc kubenswrapper[4730]: I0930 09:59:33.164123 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-wd5fh" Sep 30 09:59:33 crc kubenswrapper[4730]: I0930 09:59:33.194700 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-wd5fh" podStartSLOduration=8.194679967 podStartE2EDuration="8.194679967s" podCreationTimestamp="2025-09-30 09:59:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 09:59:33.192784127 +0000 UTC m=+617.526044140" watchObservedRunningTime="2025-09-30 09:59:33.194679967 +0000 UTC m=+617.527939960" Sep 30 09:59:33 crc kubenswrapper[4730]: I0930 09:59:33.202488 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-wd5fh" Sep 30 09:59:33 crc kubenswrapper[4730]: I0930 09:59:33.203163 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-wd5fh" Sep 30 09:59:39 crc kubenswrapper[4730]: I0930 09:59:39.381157 4730 scope.go:117] "RemoveContainer" containerID="a5bb1f559693666f8926ed88798ba0efed5cfc3fb9c465817367617f57eaf858" Sep 30 09:59:39 crc kubenswrapper[4730]: E0930 09:59:39.381919 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-t2frc_openshift-multus(98a6f8df-1ac8-4652-8074-90cb180311ad)\"" pod="openshift-multus/multus-t2frc" podUID="98a6f8df-1ac8-4652-8074-90cb180311ad" Sep 30 09:59:51 crc kubenswrapper[4730]: I0930 09:59:51.380691 4730 scope.go:117] "RemoveContainer" containerID="a5bb1f559693666f8926ed88798ba0efed5cfc3fb9c465817367617f57eaf858" Sep 30 09:59:52 crc kubenswrapper[4730]: I0930 09:59:52.268923 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-t2frc_98a6f8df-1ac8-4652-8074-90cb180311ad/kube-multus/2.log" Sep 30 09:59:52 crc kubenswrapper[4730]: I0930 09:59:52.269254 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-t2frc" event={"ID":"98a6f8df-1ac8-4652-8074-90cb180311ad","Type":"ContainerStarted","Data":"cf8ba59f5e2ab7bc960991c878a2a7be78a50b0b59be0ea03c8beac6e19ef737"} Sep 30 09:59:53 crc kubenswrapper[4730]: I0930 09:59:53.742738 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d6b9pq"] Sep 30 09:59:53 crc kubenswrapper[4730]: I0930 09:59:53.744018 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d6b9pq" Sep 30 09:59:53 crc kubenswrapper[4730]: I0930 09:59:53.747202 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Sep 30 09:59:53 crc kubenswrapper[4730]: I0930 09:59:53.761365 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d6b9pq"] Sep 30 09:59:53 crc kubenswrapper[4730]: I0930 09:59:53.796386 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4s228\" (UniqueName: \"kubernetes.io/projected/76182bda-e874-4b16-9a53-164f47f7ccb5-kube-api-access-4s228\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d6b9pq\" (UID: \"76182bda-e874-4b16-9a53-164f47f7ccb5\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d6b9pq" Sep 30 09:59:53 crc kubenswrapper[4730]: I0930 09:59:53.796490 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/76182bda-e874-4b16-9a53-164f47f7ccb5-bundle\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d6b9pq\" (UID: \"76182bda-e874-4b16-9a53-164f47f7ccb5\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d6b9pq" Sep 30 09:59:53 crc kubenswrapper[4730]: I0930 09:59:53.796536 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/76182bda-e874-4b16-9a53-164f47f7ccb5-util\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d6b9pq\" (UID: \"76182bda-e874-4b16-9a53-164f47f7ccb5\") " 
pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d6b9pq" Sep 30 09:59:53 crc kubenswrapper[4730]: I0930 09:59:53.898198 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4s228\" (UniqueName: \"kubernetes.io/projected/76182bda-e874-4b16-9a53-164f47f7ccb5-kube-api-access-4s228\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d6b9pq\" (UID: \"76182bda-e874-4b16-9a53-164f47f7ccb5\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d6b9pq" Sep 30 09:59:53 crc kubenswrapper[4730]: I0930 09:59:53.898281 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/76182bda-e874-4b16-9a53-164f47f7ccb5-bundle\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d6b9pq\" (UID: \"76182bda-e874-4b16-9a53-164f47f7ccb5\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d6b9pq" Sep 30 09:59:53 crc kubenswrapper[4730]: I0930 09:59:53.898336 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/76182bda-e874-4b16-9a53-164f47f7ccb5-util\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d6b9pq\" (UID: \"76182bda-e874-4b16-9a53-164f47f7ccb5\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d6b9pq" Sep 30 09:59:53 crc kubenswrapper[4730]: I0930 09:59:53.899052 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/76182bda-e874-4b16-9a53-164f47f7ccb5-util\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d6b9pq\" (UID: \"76182bda-e874-4b16-9a53-164f47f7ccb5\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d6b9pq" Sep 30 09:59:53 crc kubenswrapper[4730]: I0930 09:59:53.899118 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/76182bda-e874-4b16-9a53-164f47f7ccb5-bundle\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d6b9pq\" (UID: \"76182bda-e874-4b16-9a53-164f47f7ccb5\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d6b9pq" Sep 30 09:59:53 crc kubenswrapper[4730]: I0930 09:59:53.925148 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4s228\" (UniqueName: \"kubernetes.io/projected/76182bda-e874-4b16-9a53-164f47f7ccb5-kube-api-access-4s228\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d6b9pq\" (UID: \"76182bda-e874-4b16-9a53-164f47f7ccb5\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d6b9pq" Sep 30 09:59:54 crc kubenswrapper[4730]: I0930 09:59:54.062270 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d6b9pq" Sep 30 09:59:54 crc kubenswrapper[4730]: I0930 09:59:54.280448 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d6b9pq"] Sep 30 09:59:55 crc kubenswrapper[4730]: I0930 09:59:55.287340 4730 generic.go:334] "Generic (PLEG): container finished" podID="76182bda-e874-4b16-9a53-164f47f7ccb5" containerID="c2f508e0c023c14d60f510b434aed8e5128187a5485ba44d16899c5b2f91c0b8" exitCode=0 Sep 30 09:59:55 crc kubenswrapper[4730]: I0930 09:59:55.287385 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d6b9pq" event={"ID":"76182bda-e874-4b16-9a53-164f47f7ccb5","Type":"ContainerDied","Data":"c2f508e0c023c14d60f510b434aed8e5128187a5485ba44d16899c5b2f91c0b8"} Sep 30 09:59:55 crc kubenswrapper[4730]: I0930 09:59:55.287664 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d6b9pq" event={"ID":"76182bda-e874-4b16-9a53-164f47f7ccb5","Type":"ContainerStarted","Data":"4150aefa0a57f391efbc6d31037650ac88ce6ec753e686c8198be51334fecca7"} Sep 30 09:59:56 crc kubenswrapper[4730]: I0930 09:59:56.353626 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-wd5fh" Sep 30 09:59:57 crc kubenswrapper[4730]: I0930 09:59:57.298893 4730 generic.go:334] "Generic (PLEG): container finished" podID="76182bda-e874-4b16-9a53-164f47f7ccb5" containerID="0cdb8ec2cd6b1c55546f49b53512967bebe4f424a40bfbea614a35909d9f1b77" exitCode=0 Sep 30 09:59:57 crc kubenswrapper[4730]: I0930 09:59:57.298945 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d6b9pq" event={"ID":"76182bda-e874-4b16-9a53-164f47f7ccb5","Type":"ContainerDied","Data":"0cdb8ec2cd6b1c55546f49b53512967bebe4f424a40bfbea614a35909d9f1b77"} Sep 30 09:59:58 crc kubenswrapper[4730]: I0930 09:59:58.306292 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d6b9pq" event={"ID":"76182bda-e874-4b16-9a53-164f47f7ccb5","Type":"ContainerStarted","Data":"6ca04fb5c387ecbcf7c992a5756b53dc1db31716eb6e8169b0f56e57427e2bd6"} Sep 30 09:59:58 crc kubenswrapper[4730]: I0930 09:59:58.331426 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d6b9pq" podStartSLOduration=4.188975948 podStartE2EDuration="5.331403771s" podCreationTimestamp="2025-09-30 09:59:53 +0000 UTC" firstStartedPulling="2025-09-30 09:59:55.288517094 +0000 UTC m=+639.621777087" lastFinishedPulling="2025-09-30 09:59:56.430944907 +0000 UTC m=+640.764204910" observedRunningTime="2025-09-30 09:59:58.326003867 +0000 UTC m=+642.659263860" watchObservedRunningTime="2025-09-30 09:59:58.331403771 +0000 UTC m=+642.664663794" Sep 30 09:59:59 crc kubenswrapper[4730]: I0930 09:59:59.314033 4730 generic.go:334] "Generic (PLEG): container finished" podID="76182bda-e874-4b16-9a53-164f47f7ccb5" containerID="6ca04fb5c387ecbcf7c992a5756b53dc1db31716eb6e8169b0f56e57427e2bd6" exitCode=0 Sep 30 09:59:59 crc kubenswrapper[4730]: I0930 09:59:59.314093 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d6b9pq" event={"ID":"76182bda-e874-4b16-9a53-164f47f7ccb5","Type":"ContainerDied","Data":"6ca04fb5c387ecbcf7c992a5756b53dc1db31716eb6e8169b0f56e57427e2bd6"} Sep 30 10:00:00 crc kubenswrapper[4730]: I0930 10:00:00.132739 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29320440-ddpz9"] Sep 30 10:00:00 crc kubenswrapper[4730]: I0930 10:00:00.133409 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29320440-ddpz9" Sep 30 10:00:00 crc kubenswrapper[4730]: I0930 10:00:00.135190 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Sep 30 10:00:00 crc kubenswrapper[4730]: I0930 10:00:00.135785 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Sep 30 10:00:00 crc kubenswrapper[4730]: I0930 10:00:00.144202 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29320440-ddpz9"] Sep 30 10:00:00 crc kubenswrapper[4730]: I0930 10:00:00.281407 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ec8158aa-12b3-457a-9ad3-7c7da7819f1c-config-volume\") pod \"collect-profiles-29320440-ddpz9\" (UID: \"ec8158aa-12b3-457a-9ad3-7c7da7819f1c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320440-ddpz9" Sep 30 10:00:00 crc kubenswrapper[4730]: I0930 10:00:00.281463 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ec8158aa-12b3-457a-9ad3-7c7da7819f1c-secret-volume\") pod \"collect-profiles-29320440-ddpz9\" (UID: \"ec8158aa-12b3-457a-9ad3-7c7da7819f1c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320440-ddpz9" Sep 30 10:00:00 crc kubenswrapper[4730]: I0930 10:00:00.281548 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b7vz8\" (UniqueName: \"kubernetes.io/projected/ec8158aa-12b3-457a-9ad3-7c7da7819f1c-kube-api-access-b7vz8\") pod \"collect-profiles-29320440-ddpz9\" (UID: \"ec8158aa-12b3-457a-9ad3-7c7da7819f1c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320440-ddpz9" Sep 30 10:00:00 crc kubenswrapper[4730]: I0930 10:00:00.383093 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ec8158aa-12b3-457a-9ad3-7c7da7819f1c-config-volume\") pod \"collect-profiles-29320440-ddpz9\" (UID: \"ec8158aa-12b3-457a-9ad3-7c7da7819f1c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320440-ddpz9" Sep 30 10:00:00 crc kubenswrapper[4730]: I0930 10:00:00.383143 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ec8158aa-12b3-457a-9ad3-7c7da7819f1c-secret-volume\") pod \"collect-profiles-29320440-ddpz9\" (UID: \"ec8158aa-12b3-457a-9ad3-7c7da7819f1c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320440-ddpz9" Sep 30 10:00:00 crc kubenswrapper[4730]: I0930 10:00:00.383233 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-b7vz8\" (UniqueName: \"kubernetes.io/projected/ec8158aa-12b3-457a-9ad3-7c7da7819f1c-kube-api-access-b7vz8\") pod \"collect-profiles-29320440-ddpz9\" (UID: \"ec8158aa-12b3-457a-9ad3-7c7da7819f1c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320440-ddpz9" Sep 30 10:00:00 crc kubenswrapper[4730]: I0930 10:00:00.384631 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ec8158aa-12b3-457a-9ad3-7c7da7819f1c-config-volume\") pod \"collect-profiles-29320440-ddpz9\" (UID: \"ec8158aa-12b3-457a-9ad3-7c7da7819f1c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320440-ddpz9" Sep 30 10:00:00 crc kubenswrapper[4730]: I0930 10:00:00.396429 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ec8158aa-12b3-457a-9ad3-7c7da7819f1c-secret-volume\") pod \"collect-profiles-29320440-ddpz9\" (UID: \"ec8158aa-12b3-457a-9ad3-7c7da7819f1c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320440-ddpz9" Sep 30 10:00:00 crc kubenswrapper[4730]: I0930 10:00:00.407063 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b7vz8\" (UniqueName: \"kubernetes.io/projected/ec8158aa-12b3-457a-9ad3-7c7da7819f1c-kube-api-access-b7vz8\") pod \"collect-profiles-29320440-ddpz9\" (UID: \"ec8158aa-12b3-457a-9ad3-7c7da7819f1c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320440-ddpz9" Sep 30 10:00:00 crc kubenswrapper[4730]: I0930 10:00:00.448460 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29320440-ddpz9" Sep 30 10:00:00 crc kubenswrapper[4730]: I0930 10:00:00.525553 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d6b9pq" Sep 30 10:00:00 crc kubenswrapper[4730]: I0930 10:00:00.585396 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/76182bda-e874-4b16-9a53-164f47f7ccb5-bundle\") pod \"76182bda-e874-4b16-9a53-164f47f7ccb5\" (UID: \"76182bda-e874-4b16-9a53-164f47f7ccb5\") " Sep 30 10:00:00 crc kubenswrapper[4730]: I0930 10:00:00.585576 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4s228\" (UniqueName: \"kubernetes.io/projected/76182bda-e874-4b16-9a53-164f47f7ccb5-kube-api-access-4s228\") pod \"76182bda-e874-4b16-9a53-164f47f7ccb5\" (UID: \"76182bda-e874-4b16-9a53-164f47f7ccb5\") " Sep 30 10:00:00 crc kubenswrapper[4730]: I0930 10:00:00.585600 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/76182bda-e874-4b16-9a53-164f47f7ccb5-util\") pod \"76182bda-e874-4b16-9a53-164f47f7ccb5\" (UID: \"76182bda-e874-4b16-9a53-164f47f7ccb5\") " Sep 30 10:00:00 crc kubenswrapper[4730]: I0930 10:00:00.588541 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/76182bda-e874-4b16-9a53-164f47f7ccb5-bundle" (OuterVolumeSpecName: "bundle") pod "76182bda-e874-4b16-9a53-164f47f7ccb5" (UID: "76182bda-e874-4b16-9a53-164f47f7ccb5"). InnerVolumeSpecName "bundle". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 10:00:00 crc kubenswrapper[4730]: I0930 10:00:00.590434 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/76182bda-e874-4b16-9a53-164f47f7ccb5-kube-api-access-4s228" (OuterVolumeSpecName: "kube-api-access-4s228") pod "76182bda-e874-4b16-9a53-164f47f7ccb5" (UID: "76182bda-e874-4b16-9a53-164f47f7ccb5"). InnerVolumeSpecName "kube-api-access-4s228". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:00:00 crc kubenswrapper[4730]: I0930 10:00:00.597396 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/76182bda-e874-4b16-9a53-164f47f7ccb5-util" (OuterVolumeSpecName: "util") pod "76182bda-e874-4b16-9a53-164f47f7ccb5" (UID: "76182bda-e874-4b16-9a53-164f47f7ccb5"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 10:00:00 crc kubenswrapper[4730]: I0930 10:00:00.633252 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29320440-ddpz9"] Sep 30 10:00:00 crc kubenswrapper[4730]: I0930 10:00:00.687582 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4s228\" (UniqueName: \"kubernetes.io/projected/76182bda-e874-4b16-9a53-164f47f7ccb5-kube-api-access-4s228\") on node \"crc\" DevicePath \"\"" Sep 30 10:00:00 crc kubenswrapper[4730]: I0930 10:00:00.687640 4730 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/76182bda-e874-4b16-9a53-164f47f7ccb5-util\") on node \"crc\" DevicePath \"\"" Sep 30 10:00:00 crc kubenswrapper[4730]: I0930 10:00:00.687654 4730 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/76182bda-e874-4b16-9a53-164f47f7ccb5-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 10:00:01 crc kubenswrapper[4730]: I0930 10:00:01.326098 4730 generic.go:334] "Generic (PLEG): container finished" podID="ec8158aa-12b3-457a-9ad3-7c7da7819f1c" containerID="3789d69ce5f0a1bb8549c278ad2732ec821cfaa4a7a02b77047ccc1636bfeae7" exitCode=0 Sep 30 10:00:01 crc kubenswrapper[4730]: I0930 10:00:01.326173 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29320440-ddpz9" event={"ID":"ec8158aa-12b3-457a-9ad3-7c7da7819f1c","Type":"ContainerDied","Data":"3789d69ce5f0a1bb8549c278ad2732ec821cfaa4a7a02b77047ccc1636bfeae7"} Sep 30 10:00:01 crc kubenswrapper[4730]: I0930 10:00:01.326456 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29320440-ddpz9" event={"ID":"ec8158aa-12b3-457a-9ad3-7c7da7819f1c","Type":"ContainerStarted","Data":"b296d779c222a481ba6ebed9997e10eba53d6ebbe0a3d818c1ccc1a666239303"} Sep 30 10:00:01 crc kubenswrapper[4730]: I0930 10:00:01.329110 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d6b9pq" event={"ID":"76182bda-e874-4b16-9a53-164f47f7ccb5","Type":"ContainerDied","Data":"4150aefa0a57f391efbc6d31037650ac88ce6ec753e686c8198be51334fecca7"} Sep 30 10:00:01 crc kubenswrapper[4730]: I0930 10:00:01.329156 4730 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4150aefa0a57f391efbc6d31037650ac88ce6ec753e686c8198be51334fecca7" Sep 30 10:00:01 crc kubenswrapper[4730]: I0930 10:00:01.329252 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d6b9pq" Sep 30 10:00:02 crc kubenswrapper[4730]: I0930 10:00:02.534493 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29320440-ddpz9" Sep 30 10:00:02 crc kubenswrapper[4730]: I0930 10:00:02.613086 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ec8158aa-12b3-457a-9ad3-7c7da7819f1c-secret-volume\") pod \"ec8158aa-12b3-457a-9ad3-7c7da7819f1c\" (UID: \"ec8158aa-12b3-457a-9ad3-7c7da7819f1c\") " Sep 30 10:00:02 crc kubenswrapper[4730]: I0930 10:00:02.613159 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b7vz8\" (UniqueName: \"kubernetes.io/projected/ec8158aa-12b3-457a-9ad3-7c7da7819f1c-kube-api-access-b7vz8\") pod \"ec8158aa-12b3-457a-9ad3-7c7da7819f1c\" (UID: \"ec8158aa-12b3-457a-9ad3-7c7da7819f1c\") " Sep 30 10:00:02 crc kubenswrapper[4730]: I0930 10:00:02.613246 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ec8158aa-12b3-457a-9ad3-7c7da7819f1c-config-volume\") pod \"ec8158aa-12b3-457a-9ad3-7c7da7819f1c\" (UID: \"ec8158aa-12b3-457a-9ad3-7c7da7819f1c\") " Sep 30 10:00:02 crc kubenswrapper[4730]: I0930 10:00:02.614332 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ec8158aa-12b3-457a-9ad3-7c7da7819f1c-config-volume" (OuterVolumeSpecName: "config-volume") pod "ec8158aa-12b3-457a-9ad3-7c7da7819f1c" (UID: "ec8158aa-12b3-457a-9ad3-7c7da7819f1c"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 10:00:02 crc kubenswrapper[4730]: I0930 10:00:02.619079 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ec8158aa-12b3-457a-9ad3-7c7da7819f1c-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "ec8158aa-12b3-457a-9ad3-7c7da7819f1c" (UID: "ec8158aa-12b3-457a-9ad3-7c7da7819f1c"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:00:02 crc kubenswrapper[4730]: I0930 10:00:02.619218 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ec8158aa-12b3-457a-9ad3-7c7da7819f1c-kube-api-access-b7vz8" (OuterVolumeSpecName: "kube-api-access-b7vz8") pod "ec8158aa-12b3-457a-9ad3-7c7da7819f1c" (UID: "ec8158aa-12b3-457a-9ad3-7c7da7819f1c"). InnerVolumeSpecName "kube-api-access-b7vz8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:00:02 crc kubenswrapper[4730]: I0930 10:00:02.714626 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b7vz8\" (UniqueName: \"kubernetes.io/projected/ec8158aa-12b3-457a-9ad3-7c7da7819f1c-kube-api-access-b7vz8\") on node \"crc\" DevicePath \"\"" Sep 30 10:00:02 crc kubenswrapper[4730]: I0930 10:00:02.714667 4730 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ec8158aa-12b3-457a-9ad3-7c7da7819f1c-config-volume\") on node \"crc\" DevicePath \"\"" Sep 30 10:00:02 crc kubenswrapper[4730]: I0930 10:00:02.714684 4730 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ec8158aa-12b3-457a-9ad3-7c7da7819f1c-secret-volume\") on node \"crc\" DevicePath \"\"" Sep 30 10:00:03 crc kubenswrapper[4730]: I0930 10:00:03.341244 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29320440-ddpz9" event={"ID":"ec8158aa-12b3-457a-9ad3-7c7da7819f1c","Type":"ContainerDied","Data":"b296d779c222a481ba6ebed9997e10eba53d6ebbe0a3d818c1ccc1a666239303"} Sep 30 10:00:03 crc kubenswrapper[4730]: I0930 10:00:03.341295 4730 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b296d779c222a481ba6ebed9997e10eba53d6ebbe0a3d818c1ccc1a666239303" Sep 30 10:00:03 crc kubenswrapper[4730]: I0930 10:00:03.341311 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29320440-ddpz9" Sep 30 10:00:03 crc kubenswrapper[4730]: E0930 10:00:03.427606 4730 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podec8158aa_12b3_457a_9ad3_7c7da7819f1c.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podec8158aa_12b3_457a_9ad3_7c7da7819f1c.slice/crio-b296d779c222a481ba6ebed9997e10eba53d6ebbe0a3d818c1ccc1a666239303\": RecentStats: unable to find data in memory cache]" Sep 30 10:00:10 crc kubenswrapper[4730]: I0930 10:00:10.894114 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-7c8cf85677-kv7bf"] Sep 30 10:00:10 crc kubenswrapper[4730]: E0930 10:00:10.894954 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="76182bda-e874-4b16-9a53-164f47f7ccb5" containerName="extract" Sep 30 10:00:10 crc kubenswrapper[4730]: I0930 10:00:10.894970 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="76182bda-e874-4b16-9a53-164f47f7ccb5" containerName="extract" Sep 30 10:00:10 crc kubenswrapper[4730]: E0930 10:00:10.894983 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="76182bda-e874-4b16-9a53-164f47f7ccb5" containerName="util" Sep 30 10:00:10 crc kubenswrapper[4730]: I0930 10:00:10.894990 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="76182bda-e874-4b16-9a53-164f47f7ccb5" containerName="util" Sep 30 10:00:10 crc kubenswrapper[4730]: E0930 10:00:10.895007 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ec8158aa-12b3-457a-9ad3-7c7da7819f1c" containerName="collect-profiles" Sep 30 10:00:10 crc kubenswrapper[4730]: I0930 10:00:10.895015 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="ec8158aa-12b3-457a-9ad3-7c7da7819f1c" containerName="collect-profiles" 
Sep 30 10:00:10 crc kubenswrapper[4730]: E0930 10:00:10.895033 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="76182bda-e874-4b16-9a53-164f47f7ccb5" containerName="pull"
Sep 30 10:00:10 crc kubenswrapper[4730]: I0930 10:00:10.895042 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="76182bda-e874-4b16-9a53-164f47f7ccb5" containerName="pull"
Sep 30 10:00:10 crc kubenswrapper[4730]: I0930 10:00:10.895156 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="76182bda-e874-4b16-9a53-164f47f7ccb5" containerName="extract"
Sep 30 10:00:10 crc kubenswrapper[4730]: I0930 10:00:10.895166 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="ec8158aa-12b3-457a-9ad3-7c7da7819f1c" containerName="collect-profiles"
Sep 30 10:00:10 crc kubenswrapper[4730]: I0930 10:00:10.895637 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-7c8cf85677-kv7bf"
Sep 30 10:00:10 crc kubenswrapper[4730]: I0930 10:00:10.897460 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators"/"openshift-service-ca.crt"
Sep 30 10:00:10 crc kubenswrapper[4730]: I0930 10:00:10.898102 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators"/"kube-root-ca.crt"
Sep 30 10:00:10 crc kubenswrapper[4730]: I0930 10:00:10.902791 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-dockercfg-vfjg6"
Sep 30 10:00:10 crc kubenswrapper[4730]: I0930 10:00:10.908124 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-7c8cf85677-kv7bf"]
Sep 30 10:00:11 crc kubenswrapper[4730]: I0930 10:00:11.017408 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vkrv4\" (UniqueName: \"kubernetes.io/projected/ad98526d-6d03-4564-849c-5ae4d06519e2-kube-api-access-vkrv4\") pod \"obo-prometheus-operator-7c8cf85677-kv7bf\" (UID: \"ad98526d-6d03-4564-849c-5ae4d06519e2\") " pod="openshift-operators/obo-prometheus-operator-7c8cf85677-kv7bf"
Sep 30 10:00:11 crc kubenswrapper[4730]: I0930 10:00:11.027716 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-6b8dd4c948-b87gk"]
Sep 30 10:00:11 crc kubenswrapper[4730]: I0930 10:00:11.029036 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6b8dd4c948-b87gk"
Sep 30 10:00:11 crc kubenswrapper[4730]: I0930 10:00:11.032195 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-service-cert"
Sep 30 10:00:11 crc kubenswrapper[4730]: I0930 10:00:11.032289 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-dockercfg-q59zh"
Sep 30 10:00:11 crc kubenswrapper[4730]: I0930 10:00:11.041546 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-6b8dd4c948-b87gk"]
Sep 30 10:00:11 crc kubenswrapper[4730]: I0930 10:00:11.074736 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-6b8dd4c948-rbcn4"]
Sep 30 10:00:11 crc kubenswrapper[4730]: I0930 10:00:11.077017 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6b8dd4c948-rbcn4"
Sep 30 10:00:11 crc kubenswrapper[4730]: I0930 10:00:11.081374 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-6b8dd4c948-rbcn4"]
Sep 30 10:00:11 crc kubenswrapper[4730]: I0930 10:00:11.119331 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/5ab94ac6-ffcc-42a9-b00e-f3ef3eb5df1d-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-6b8dd4c948-rbcn4\" (UID: \"5ab94ac6-ffcc-42a9-b00e-f3ef3eb5df1d\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-6b8dd4c948-rbcn4"
Sep 30 10:00:11 crc kubenswrapper[4730]: I0930 10:00:11.119405 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vkrv4\" (UniqueName: \"kubernetes.io/projected/ad98526d-6d03-4564-849c-5ae4d06519e2-kube-api-access-vkrv4\") pod \"obo-prometheus-operator-7c8cf85677-kv7bf\" (UID: \"ad98526d-6d03-4564-849c-5ae4d06519e2\") " pod="openshift-operators/obo-prometheus-operator-7c8cf85677-kv7bf"
Sep 30 10:00:11 crc kubenswrapper[4730]: I0930 10:00:11.119429 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/e1ad7bb1-aa06-43d2-bd4f-1f53a4bc360a-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-6b8dd4c948-b87gk\" (UID: \"e1ad7bb1-aa06-43d2-bd4f-1f53a4bc360a\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-6b8dd4c948-b87gk"
Sep 30 10:00:11 crc kubenswrapper[4730]: I0930 10:00:11.119487 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/5ab94ac6-ffcc-42a9-b00e-f3ef3eb5df1d-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-6b8dd4c948-rbcn4\" (UID: \"5ab94ac6-ffcc-42a9-b00e-f3ef3eb5df1d\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-6b8dd4c948-rbcn4"
Sep 30 10:00:11 crc kubenswrapper[4730]: I0930 10:00:11.119513 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/e1ad7bb1-aa06-43d2-bd4f-1f53a4bc360a-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-6b8dd4c948-b87gk\" (UID: \"e1ad7bb1-aa06-43d2-bd4f-1f53a4bc360a\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-6b8dd4c948-b87gk"
Sep 30 10:00:11 crc kubenswrapper[4730]: I0930 10:00:11.137535 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/observability-operator-cc5f78dfc-b68z8"]
Sep 30 10:00:11 crc kubenswrapper[4730]: I0930 10:00:11.138233 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-cc5f78dfc-b68z8"
Sep 30 10:00:11 crc kubenswrapper[4730]: I0930 10:00:11.140808 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-operator-tls"
Sep 30 10:00:11 crc kubenswrapper[4730]: I0930 10:00:11.141123 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-operator-sa-dockercfg-qr8qc"
Sep 30 10:00:11 crc kubenswrapper[4730]: I0930 10:00:11.144926 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vkrv4\" (UniqueName: \"kubernetes.io/projected/ad98526d-6d03-4564-849c-5ae4d06519e2-kube-api-access-vkrv4\") pod \"obo-prometheus-operator-7c8cf85677-kv7bf\" (UID: \"ad98526d-6d03-4564-849c-5ae4d06519e2\") " pod="openshift-operators/obo-prometheus-operator-7c8cf85677-kv7bf"
Sep 30 10:00:11 crc kubenswrapper[4730]: I0930 10:00:11.208536 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-operator-cc5f78dfc-b68z8"]
Sep 30 10:00:11 crc kubenswrapper[4730]: I0930 10:00:11.212749 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-7c8cf85677-kv7bf"
Sep 30 10:00:11 crc kubenswrapper[4730]: I0930 10:00:11.220471 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/5ab94ac6-ffcc-42a9-b00e-f3ef3eb5df1d-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-6b8dd4c948-rbcn4\" (UID: \"5ab94ac6-ffcc-42a9-b00e-f3ef3eb5df1d\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-6b8dd4c948-rbcn4"
Sep 30 10:00:11 crc kubenswrapper[4730]: I0930 10:00:11.220525 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/e1ad7bb1-aa06-43d2-bd4f-1f53a4bc360a-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-6b8dd4c948-b87gk\" (UID: \"e1ad7bb1-aa06-43d2-bd4f-1f53a4bc360a\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-6b8dd4c948-b87gk"
Sep 30 10:00:11 crc kubenswrapper[4730]: I0930 10:00:11.220551 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/5ab94ac6-ffcc-42a9-b00e-f3ef3eb5df1d-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-6b8dd4c948-rbcn4\" (UID: \"5ab94ac6-ffcc-42a9-b00e-f3ef3eb5df1d\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-6b8dd4c948-rbcn4"
Sep 30 10:00:11 crc kubenswrapper[4730]: I0930 10:00:11.220579 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tw96s\" (UniqueName: \"kubernetes.io/projected/2b9be7f9-7237-4a96-b0a3-9052ab5b0eea-kube-api-access-tw96s\") pod \"observability-operator-cc5f78dfc-b68z8\" (UID: \"2b9be7f9-7237-4a96-b0a3-9052ab5b0eea\") " pod="openshift-operators/observability-operator-cc5f78dfc-b68z8"
Sep 30 10:00:11 crc kubenswrapper[4730]: I0930 10:00:11.220604 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/e1ad7bb1-aa06-43d2-bd4f-1f53a4bc360a-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-6b8dd4c948-b87gk\" (UID: \"e1ad7bb1-aa06-43d2-bd4f-1f53a4bc360a\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-6b8dd4c948-b87gk"
Sep 30 10:00:11 crc kubenswrapper[4730]: I0930 10:00:11.220685 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/2b9be7f9-7237-4a96-b0a3-9052ab5b0eea-observability-operator-tls\") pod \"observability-operator-cc5f78dfc-b68z8\" (UID: \"2b9be7f9-7237-4a96-b0a3-9052ab5b0eea\") " pod="openshift-operators/observability-operator-cc5f78dfc-b68z8"
Sep 30 10:00:11 crc kubenswrapper[4730]: I0930 10:00:11.223882 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/5ab94ac6-ffcc-42a9-b00e-f3ef3eb5df1d-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-6b8dd4c948-rbcn4\" (UID: \"5ab94ac6-ffcc-42a9-b00e-f3ef3eb5df1d\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-6b8dd4c948-rbcn4"
Sep 30 10:00:11 crc kubenswrapper[4730]: I0930 10:00:11.224499 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/5ab94ac6-ffcc-42a9-b00e-f3ef3eb5df1d-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-6b8dd4c948-rbcn4\" (UID: \"5ab94ac6-ffcc-42a9-b00e-f3ef3eb5df1d\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-6b8dd4c948-rbcn4"
Sep 30 10:00:11 crc kubenswrapper[4730]: I0930 10:00:11.225098 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/e1ad7bb1-aa06-43d2-bd4f-1f53a4bc360a-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-6b8dd4c948-b87gk\" (UID: \"e1ad7bb1-aa06-43d2-bd4f-1f53a4bc360a\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-6b8dd4c948-b87gk"
Sep 30 10:00:11 crc kubenswrapper[4730]: I0930 10:00:11.226087 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/e1ad7bb1-aa06-43d2-bd4f-1f53a4bc360a-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-6b8dd4c948-b87gk\" (UID: \"e1ad7bb1-aa06-43d2-bd4f-1f53a4bc360a\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-6b8dd4c948-b87gk"
Sep 30 10:00:11 crc kubenswrapper[4730]: I0930 10:00:11.321931 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tw96s\" (UniqueName: \"kubernetes.io/projected/2b9be7f9-7237-4a96-b0a3-9052ab5b0eea-kube-api-access-tw96s\") pod \"observability-operator-cc5f78dfc-b68z8\" (UID: \"2b9be7f9-7237-4a96-b0a3-9052ab5b0eea\") " pod="openshift-operators/observability-operator-cc5f78dfc-b68z8"
Sep 30 10:00:11 crc kubenswrapper[4730]: I0930 10:00:11.327051 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/2b9be7f9-7237-4a96-b0a3-9052ab5b0eea-observability-operator-tls\") pod \"observability-operator-cc5f78dfc-b68z8\" (UID: \"2b9be7f9-7237-4a96-b0a3-9052ab5b0eea\") " pod="openshift-operators/observability-operator-cc5f78dfc-b68z8"
Sep 30 10:00:11 crc kubenswrapper[4730]: I0930 10:00:11.337771 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/2b9be7f9-7237-4a96-b0a3-9052ab5b0eea-observability-operator-tls\") pod \"observability-operator-cc5f78dfc-b68z8\" (UID: \"2b9be7f9-7237-4a96-b0a3-9052ab5b0eea\") " pod="openshift-operators/observability-operator-cc5f78dfc-b68z8"
\"2b9be7f9-7237-4a96-b0a3-9052ab5b0eea\") " pod="openshift-operators/observability-operator-cc5f78dfc-b68z8" Sep 30 10:00:11 crc kubenswrapper[4730]: I0930 10:00:11.349061 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/perses-operator-54bc95c9fb-5gr7s"] Sep 30 10:00:11 crc kubenswrapper[4730]: I0930 10:00:11.349994 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-54bc95c9fb-5gr7s" Sep 30 10:00:11 crc kubenswrapper[4730]: I0930 10:00:11.352784 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"perses-operator-dockercfg-f8wnf" Sep 30 10:00:11 crc kubenswrapper[4730]: I0930 10:00:11.357245 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6b8dd4c948-b87gk" Sep 30 10:00:11 crc kubenswrapper[4730]: I0930 10:00:11.362254 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/perses-operator-54bc95c9fb-5gr7s"] Sep 30 10:00:11 crc kubenswrapper[4730]: I0930 10:00:11.371516 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tw96s\" (UniqueName: \"kubernetes.io/projected/2b9be7f9-7237-4a96-b0a3-9052ab5b0eea-kube-api-access-tw96s\") pod \"observability-operator-cc5f78dfc-b68z8\" (UID: \"2b9be7f9-7237-4a96-b0a3-9052ab5b0eea\") " pod="openshift-operators/observability-operator-cc5f78dfc-b68z8" Sep 30 10:00:11 crc kubenswrapper[4730]: I0930 10:00:11.394112 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6b8dd4c948-rbcn4" Sep 30 10:00:11 crc kubenswrapper[4730]: I0930 10:00:11.428469 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/1f3b0abd-5e6c-4afe-9122-606234241336-openshift-service-ca\") pod \"perses-operator-54bc95c9fb-5gr7s\" (UID: \"1f3b0abd-5e6c-4afe-9122-606234241336\") " pod="openshift-operators/perses-operator-54bc95c9fb-5gr7s" Sep 30 10:00:11 crc kubenswrapper[4730]: I0930 10:00:11.428578 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jvk6p\" (UniqueName: \"kubernetes.io/projected/1f3b0abd-5e6c-4afe-9122-606234241336-kube-api-access-jvk6p\") pod \"perses-operator-54bc95c9fb-5gr7s\" (UID: \"1f3b0abd-5e6c-4afe-9122-606234241336\") " pod="openshift-operators/perses-operator-54bc95c9fb-5gr7s" Sep 30 10:00:11 crc kubenswrapper[4730]: I0930 10:00:11.468857 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/observability-operator-cc5f78dfc-b68z8" Sep 30 10:00:11 crc kubenswrapper[4730]: I0930 10:00:11.518098 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-7c8cf85677-kv7bf"] Sep 30 10:00:11 crc kubenswrapper[4730]: I0930 10:00:11.529376 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jvk6p\" (UniqueName: \"kubernetes.io/projected/1f3b0abd-5e6c-4afe-9122-606234241336-kube-api-access-jvk6p\") pod \"perses-operator-54bc95c9fb-5gr7s\" (UID: \"1f3b0abd-5e6c-4afe-9122-606234241336\") " pod="openshift-operators/perses-operator-54bc95c9fb-5gr7s" Sep 30 10:00:11 crc kubenswrapper[4730]: I0930 10:00:11.529457 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/1f3b0abd-5e6c-4afe-9122-606234241336-openshift-service-ca\") pod \"perses-operator-54bc95c9fb-5gr7s\" (UID: \"1f3b0abd-5e6c-4afe-9122-606234241336\") " pod="openshift-operators/perses-operator-54bc95c9fb-5gr7s" Sep 30 10:00:11 crc kubenswrapper[4730]: I0930 10:00:11.530849 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/1f3b0abd-5e6c-4afe-9122-606234241336-openshift-service-ca\") pod \"perses-operator-54bc95c9fb-5gr7s\" (UID: \"1f3b0abd-5e6c-4afe-9122-606234241336\") " pod="openshift-operators/perses-operator-54bc95c9fb-5gr7s" Sep 30 10:00:11 crc kubenswrapper[4730]: W0930 10:00:11.541887 4730 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podad98526d_6d03_4564_849c_5ae4d06519e2.slice/crio-9389affa2d433d60bcbfb8f6eb3cae6f808f9613a0c181b9219ef54eb3ebcd80 WatchSource:0}: Error finding container 9389affa2d433d60bcbfb8f6eb3cae6f808f9613a0c181b9219ef54eb3ebcd80: Status 404 returned error can't find the container with id 9389affa2d433d60bcbfb8f6eb3cae6f808f9613a0c181b9219ef54eb3ebcd80 Sep 30 10:00:11 crc kubenswrapper[4730]: I0930 10:00:11.553324 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jvk6p\" (UniqueName: \"kubernetes.io/projected/1f3b0abd-5e6c-4afe-9122-606234241336-kube-api-access-jvk6p\") pod \"perses-operator-54bc95c9fb-5gr7s\" (UID: \"1f3b0abd-5e6c-4afe-9122-606234241336\") " pod="openshift-operators/perses-operator-54bc95c9fb-5gr7s" Sep 30 10:00:11 crc kubenswrapper[4730]: I0930 10:00:11.718129 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/perses-operator-54bc95c9fb-5gr7s" Sep 30 10:00:11 crc kubenswrapper[4730]: I0930 10:00:11.939440 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-6b8dd4c948-b87gk"] Sep 30 10:00:11 crc kubenswrapper[4730]: W0930 10:00:11.949132 4730 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode1ad7bb1_aa06_43d2_bd4f_1f53a4bc360a.slice/crio-9d01a87a836882da8aaea43681025ef1890fbb9a82ab4edb1e8fde9b89f8cd53 WatchSource:0}: Error finding container 9d01a87a836882da8aaea43681025ef1890fbb9a82ab4edb1e8fde9b89f8cd53: Status 404 returned error can't find the container with id 9d01a87a836882da8aaea43681025ef1890fbb9a82ab4edb1e8fde9b89f8cd53 Sep 30 10:00:11 crc kubenswrapper[4730]: I0930 10:00:11.992149 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-operator-cc5f78dfc-b68z8"] Sep 30 10:00:12 crc kubenswrapper[4730]: I0930 10:00:12.001602 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-6b8dd4c948-rbcn4"] Sep 30 10:00:12 crc kubenswrapper[4730]: W0930 10:00:12.009463 4730 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5ab94ac6_ffcc_42a9_b00e_f3ef3eb5df1d.slice/crio-c9b75045039662298156fd5570959ca1718b00218817f4b6a400df63544bf461 WatchSource:0}: Error finding container c9b75045039662298156fd5570959ca1718b00218817f4b6a400df63544bf461: Status 404 returned error can't find the container with id c9b75045039662298156fd5570959ca1718b00218817f4b6a400df63544bf461 Sep 30 10:00:12 crc kubenswrapper[4730]: I0930 10:00:12.227598 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/perses-operator-54bc95c9fb-5gr7s"] Sep 30 10:00:12 crc kubenswrapper[4730]: W0930 10:00:12.243733 4730 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1f3b0abd_5e6c_4afe_9122_606234241336.slice/crio-15733f872c1edf8a8fa5e08fabfedb5c10c8a04d5b387de7863314716038449c WatchSource:0}: Error finding container 15733f872c1edf8a8fa5e08fabfedb5c10c8a04d5b387de7863314716038449c: Status 404 returned error can't find the container with id 15733f872c1edf8a8fa5e08fabfedb5c10c8a04d5b387de7863314716038449c Sep 30 10:00:12 crc kubenswrapper[4730]: I0930 10:00:12.396628 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/perses-operator-54bc95c9fb-5gr7s" event={"ID":"1f3b0abd-5e6c-4afe-9122-606234241336","Type":"ContainerStarted","Data":"15733f872c1edf8a8fa5e08fabfedb5c10c8a04d5b387de7863314716038449c"} Sep 30 10:00:12 crc kubenswrapper[4730]: I0930 10:00:12.397463 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-operator-cc5f78dfc-b68z8" event={"ID":"2b9be7f9-7237-4a96-b0a3-9052ab5b0eea","Type":"ContainerStarted","Data":"1ce6e5ffca9631589871c36c003fd0c372ba39af87e5fa6c2f912217024132c6"} Sep 30 10:00:12 crc kubenswrapper[4730]: I0930 10:00:12.398453 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-7c8cf85677-kv7bf" event={"ID":"ad98526d-6d03-4564-849c-5ae4d06519e2","Type":"ContainerStarted","Data":"9389affa2d433d60bcbfb8f6eb3cae6f808f9613a0c181b9219ef54eb3ebcd80"} Sep 30 10:00:12 crc kubenswrapper[4730]: I0930 10:00:12.399371 4730 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6b8dd4c948-b87gk" event={"ID":"e1ad7bb1-aa06-43d2-bd4f-1f53a4bc360a","Type":"ContainerStarted","Data":"9d01a87a836882da8aaea43681025ef1890fbb9a82ab4edb1e8fde9b89f8cd53"} Sep 30 10:00:12 crc kubenswrapper[4730]: I0930 10:00:12.400213 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6b8dd4c948-rbcn4" event={"ID":"5ab94ac6-ffcc-42a9-b00e-f3ef3eb5df1d","Type":"ContainerStarted","Data":"c9b75045039662298156fd5570959ca1718b00218817f4b6a400df63544bf461"} Sep 30 10:00:32 crc kubenswrapper[4730]: E0930 10:00:32.077129 4730 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/cluster-observability-operator/cluster-observability-rhel9-operator@sha256:27ffe36aad6e606e6d0a211f48f3cdb58a53aa0d5e8ead6a444427231261ab9e" Sep 30 10:00:32 crc kubenswrapper[4730]: E0930 10:00:32.077905 4730 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:operator,Image:registry.redhat.io/cluster-observability-operator/cluster-observability-rhel9-operator@sha256:27ffe36aad6e606e6d0a211f48f3cdb58a53aa0d5e8ead6a444427231261ab9e,Command:[],Args:[--namespace=$(NAMESPACE) --images=alertmanager=$(RELATED_IMAGE_ALERTMANAGER) --images=prometheus=$(RELATED_IMAGE_PROMETHEUS) --images=thanos=$(RELATED_IMAGE_THANOS) --images=perses=$(RELATED_IMAGE_PERSES) --images=ui-dashboards=$(RELATED_IMAGE_CONSOLE_DASHBOARDS_PLUGIN) --images=ui-distributed-tracing=$(RELATED_IMAGE_CONSOLE_DISTRIBUTED_TRACING_PLUGIN) --images=ui-distributed-tracing-pf5=$(RELATED_IMAGE_CONSOLE_DISTRIBUTED_TRACING_PLUGIN_PF5) --images=ui-distributed-tracing-pf4=$(RELATED_IMAGE_CONSOLE_DISTRIBUTED_TRACING_PLUGIN_PF4) --images=ui-logging=$(RELATED_IMAGE_CONSOLE_LOGGING_PLUGIN) --images=ui-logging-pf4=$(RELATED_IMAGE_CONSOLE_LOGGING_PLUGIN_PF4) --images=ui-troubleshooting-panel=$(RELATED_IMAGE_CONSOLE_TROUBLESHOOTING_PANEL_PLUGIN) --images=ui-monitoring=$(RELATED_IMAGE_CONSOLE_MONITORING_PLUGIN) --images=ui-monitoring-pf5=$(RELATED_IMAGE_CONSOLE_MONITORING_PLUGIN_PF5) --images=korrel8r=$(RELATED_IMAGE_KORREL8R) --images=health-analyzer=$(RELATED_IMAGE_CLUSTER_HEALTH_ANALYZER) 
--openshift.enabled=true],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:RELATED_IMAGE_ALERTMANAGER,Value:registry.redhat.io/cluster-observability-operator/alertmanager-rhel9@sha256:4d25b0e31549d780928d2dd3eed7defd9c6d460deb92dcff0fe72c5023029404,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_PROMETHEUS,Value:registry.redhat.io/cluster-observability-operator/prometheus-rhel9@sha256:a0a1d0e39de54c5b2786c2b82d0104f358b479135c069075ddd4f7cd76826c00,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_THANOS,Value:registry.redhat.io/cluster-observability-operator/thanos-rhel9@sha256:f3806c97420ec8ba91895ce7627df7612cccb927c05d7854377f45cdd6c924a8,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_PERSES,Value:registry.redhat.io/cluster-observability-operator/perses-0-50-rhel9@sha256:4b5e53d226733237fc5abd0476eb3c96162cf3d8da7aeba8deda631fa8987223,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_DASHBOARDS_PLUGIN,Value:registry.redhat.io/cluster-observability-operator/dashboards-console-plugin-0-4-rhel9@sha256:53125bddbefca2ba2b57c3fd74bd4b376da803e420201220548878f557bd6610,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_DISTRIBUTED_TRACING_PLUGIN,Value:registry.redhat.io/cluster-observability-operator/distributed-tracing-console-plugin-1-0-rhel9@sha256:1dbe9a684271e00c8f36d8b96c9b22f6ee3c6f907ea6ad20980901bd533f9a3a,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_DISTRIBUTED_TRACING_PLUGIN_PF5,Value:registry.redhat.io/cluster-observability-operator/distributed-tracing-console-plugin-0-4-rhel9@sha256:6aafab2c90bcbc6702f2d63d585a764baa8de8207e6af7afa60f3976ddfa9bd3,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_DISTRIBUTED_TRACING_PLUGIN_PF4,Value:registry.redhat.io/cluster-observability-operator/distributed-tracing-console-plugin-0-3-rhel9@sha256:9f80851e8137c2c5e5c2aee13fc663f6c7124d9524d88c06c1507748ce84e1ed,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_LOGGING_PLUGIN,Value:registry.redhat.io/cluster-observability-operator/logging-console-plugin-6-1-rhel9@sha256:2c9b2be12f15f06a24393dbab6a31682cee399d42e2cc04b0dcf03b2b598d5cf,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_LOGGING_PLUGIN_PF4,Value:registry.redhat.io/cluster-observability-operator/logging-console-plugin-6-0-rhel9@sha256:e9042d93f624790c450724158a8323277e4dd136530c763fec8db31f51fd8552,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_TROUBLESHOOTING_PANEL_PLUGIN,Value:registry.redhat.io/cluster-observability-operator/troubleshooting-panel-console-plugin-0-4-rhel9@sha256:456d45001816b9adc38745e0ad8705bdc0150d03d0f65e0dfa9caf3fb8980fad,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_MONITORING_PLUGIN,Value:registry.redhat.io/cluster-observability-operator/monitoring-console-plugin-0-5-rhel9@sha256:f3446969c67c18b44bee38ac946091fe9397a2117cb5b7aacb39406461c1efe1,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_MONITORING_PLUGIN_PF5,Value:registry.redhat.io/cluster-observability-operator/monitoring-console-plugin-0-4-rhel9@sha256:ade84f8be7d23bd4b9c80e07462dc947280f0bcf6071e6edd927fef54c254b7e,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_KORREL8R,Value:registry.redhat.io/cluster-observability-operator/korrel8r-rhel9@sha256:039e139cf9217bbe72248674df76cbe4baf4bef9f8dc367d2cb51eae9c4aa9d7,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CLUSTER_HEALTH_ANALYZER,Value:registry.redhat.io/cluster-observability-operator/cluster-health-analyzer-rhel9@sha256:
142180f277f0221ef2d4176f9af6dcdb4e7ab434a68f0dfad2ee5bee0e667ddd,ValueFrom:nil,},EnvVar{Name:OPERATOR_CONDITION_NAME,Value:cluster-observability-operator.v1.2.2,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{400 -3} {} 400m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{100 -3} {} 100m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:observability-operator-tls,ReadOnly:true,MountPath:/etc/tls/private,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-tw96s,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:0,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:0,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000350000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod observability-operator-cc5f78dfc-b68z8_openshift-operators(2b9be7f9-7237-4a96-b0a3-9052ab5b0eea): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Sep 30 10:00:32 crc kubenswrapper[4730]: E0930 10:00:32.079841 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-operators/observability-operator-cc5f78dfc-b68z8" podUID="2b9be7f9-7237-4a96-b0a3-9052ab5b0eea" Sep 30 10:00:32 crc kubenswrapper[4730]: E0930 10:00:32.566521 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/cluster-observability-operator/cluster-observability-rhel9-operator@sha256:27ffe36aad6e606e6d0a211f48f3cdb58a53aa0d5e8ead6a444427231261ab9e\\\"\"" pod="openshift-operators/observability-operator-cc5f78dfc-b68z8" podUID="2b9be7f9-7237-4a96-b0a3-9052ab5b0eea" Sep 30 10:00:33 crc kubenswrapper[4730]: E0930 10:00:33.001898 4730 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/cluster-observability-operator/obo-prometheus-rhel9-operator@sha256:e2681bce57dc9c15701f5591532c2dfe8f19778606661339553a28dc003dbca5" 
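The sequence above shows a failed pull (ErrImagePull) followed by "Error syncing pod ... with ImagePullBackOff": after a pull fails, the kubelet does not retry immediately but applies an exponential back-off, which upstream defaults put at roughly a 10-second initial delay doubling up to a 5-minute cap. The sketch below only reproduces that shape for illustration; it is not the kubelet's actual implementation.

// Illustrative sketch of capped exponential back-off, assuming upstream defaults.
package main

import (
	"fmt"
	"time"
)

func main() {
	const (
		initial = 10 * time.Second // assumed initial back-off
		max     = 5 * time.Minute  // assumed cap (steady-state retry interval)
	)
	delay := initial
	for attempt := 1; attempt <= 8; attempt++ {
		fmt.Printf("pull attempt %d failed; next retry in %s\n", attempt, delay)
		delay *= 2
		if delay > max {
			delay = max // once capped, retries happen every 5 minutes
		}
	}
}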
Sep 30 10:00:33 crc kubenswrapper[4730]: E0930 10:00:33.002154 4730 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:prometheus-operator,Image:registry.redhat.io/cluster-observability-operator/obo-prometheus-rhel9-operator@sha256:e2681bce57dc9c15701f5591532c2dfe8f19778606661339553a28dc003dbca5,Command:[],Args:[--prometheus-config-reloader=$(RELATED_IMAGE_PROMETHEUS_CONFIG_RELOADER) --prometheus-instance-selector=app.kubernetes.io/managed-by=observability-operator --alertmanager-instance-selector=app.kubernetes.io/managed-by=observability-operator --thanos-ruler-instance-selector=app.kubernetes.io/managed-by=observability-operator],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:http,HostPort:0,ContainerPort:8080,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:GOGC,Value:30,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_PROMETHEUS_CONFIG_RELOADER,Value:registry.redhat.io/cluster-observability-operator/obo-prometheus-operator-prometheus-config-reloader-rhel9@sha256:8597c48fc71fc6ec8e87dbe40dace4dbb7b817c1039db608af76a0d90f7ac2d0,ValueFrom:nil,},EnvVar{Name:OPERATOR_CONDITION_NAME,Value:cluster-observability-operator.v1.2.2,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{100 -3} {} 100m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{157286400 0} {} 150Mi BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-vkrv4,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:*true,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod obo-prometheus-operator-7c8cf85677-kv7bf_openshift-operators(ad98526d-6d03-4564-849c-5ae4d06519e2): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Sep 30 10:00:33 crc kubenswrapper[4730]: E0930 10:00:33.003392 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"prometheus-operator\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-operators/obo-prometheus-operator-7c8cf85677-kv7bf" podUID="ad98526d-6d03-4564-849c-5ae4d06519e2"
Sep 30 10:00:33 crc kubenswrapper[4730]: I0930 10:00:33.570247 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6b8dd4c948-b87gk" event={"ID":"e1ad7bb1-aa06-43d2-bd4f-1f53a4bc360a","Type":"ContainerStarted","Data":"bdf5bb46da2de29e7465ad63995069b745a46571b41e06ea9838a1076208716c"}
Sep 30 10:00:33 crc kubenswrapper[4730]: I0930 10:00:33.572622 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6b8dd4c948-rbcn4" event={"ID":"5ab94ac6-ffcc-42a9-b00e-f3ef3eb5df1d","Type":"ContainerStarted","Data":"a2b53c6b0ef7a7effedf16481048f09fde56c94cbffc7bdf67bc1bda91655a13"}
Sep 30 10:00:33 crc kubenswrapper[4730]: I0930 10:00:33.574286 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/perses-operator-54bc95c9fb-5gr7s" event={"ID":"1f3b0abd-5e6c-4afe-9122-606234241336","Type":"ContainerStarted","Data":"cb106df096517ccf3c655ed88cdf966a4b4a4d6cdb8611318c69e90fa34c2c01"}
Sep 30 10:00:33 crc kubenswrapper[4730]: I0930 10:00:33.574450 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators/perses-operator-54bc95c9fb-5gr7s"
Sep 30 10:00:33 crc kubenswrapper[4730]: E0930 10:00:33.576127 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"prometheus-operator\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/cluster-observability-operator/obo-prometheus-rhel9-operator@sha256:e2681bce57dc9c15701f5591532c2dfe8f19778606661339553a28dc003dbca5\\\"\"" pod="openshift-operators/obo-prometheus-operator-7c8cf85677-kv7bf" podUID="ad98526d-6d03-4564-849c-5ae4d06519e2"
Sep 30 10:00:33 crc kubenswrapper[4730]: I0930 10:00:33.590973 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6b8dd4c948-b87gk" podStartSLOduration=1.522063787 podStartE2EDuration="22.590932366s" podCreationTimestamp="2025-09-30 10:00:11 +0000 UTC" firstStartedPulling="2025-09-30 10:00:11.953543431 +0000 UTC m=+656.286803424" lastFinishedPulling="2025-09-30 10:00:33.02241201 +0000 UTC m=+677.355672003" observedRunningTime="2025-09-30 10:00:33.586680464 +0000 UTC m=+677.919940457" watchObservedRunningTime="2025-09-30 10:00:33.590932366 +0000 UTC m=+677.924192359"
Sep 30 10:00:33 crc kubenswrapper[4730]: I0930 10:00:33.605727 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6b8dd4c948-rbcn4" podStartSLOduration=1.5893756140000002 podStartE2EDuration="22.605701983s" podCreationTimestamp="2025-09-30 10:00:11 +0000 UTC" firstStartedPulling="2025-09-30 10:00:12.016203496 +0000 UTC m=+656.349463489" lastFinishedPulling="2025-09-30 10:00:33.032529865 +0000 UTC m=+677.365789858" observedRunningTime="2025-09-30 10:00:33.6029311 +0000 UTC m=+677.936191093" watchObservedRunningTime="2025-09-30 10:00:33.605701983 +0000 UTC m=+677.938961986"
Sep 30 10:00:33 crc kubenswrapper[4730]: I0930 10:00:33.664778 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/perses-operator-54bc95c9fb-5gr7s" podStartSLOduration=1.850257478 podStartE2EDuration="22.664753862s" podCreationTimestamp="2025-09-30 10:00:11 +0000 UTC" firstStartedPulling="2025-09-30 10:00:12.246356174 +0000 UTC m=+656.579616167" lastFinishedPulling="2025-09-30 10:00:33.060852558 +0000 UTC m=+677.394112551" observedRunningTime="2025-09-30 10:00:33.664206308 +0000 UTC m=+677.997466301" watchObservedRunningTime="2025-09-30 10:00:33.664753862 +0000 UTC m=+677.998013855"
Sep 30 10:00:41 crc kubenswrapper[4730]: I0930 10:00:41.722565 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators/perses-operator-54bc95c9fb-5gr7s"
event={"ID":"ad98526d-6d03-4564-849c-5ae4d06519e2","Type":"ContainerStarted","Data":"6fabcdbc21076d6fb892248adb5f6aa154ae6c889b9237182b8000c9db528ae5"} Sep 30 10:00:49 crc kubenswrapper[4730]: I0930 10:00:49.676327 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-operator-cc5f78dfc-b68z8" event={"ID":"2b9be7f9-7237-4a96-b0a3-9052ab5b0eea","Type":"ContainerStarted","Data":"cd82c6096269d8713e68bcb1a84d9adb0366c5e67e24fa4d4ce1bb2274561add"} Sep 30 10:00:49 crc kubenswrapper[4730]: I0930 10:00:49.676666 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators/observability-operator-cc5f78dfc-b68z8" Sep 30 10:00:49 crc kubenswrapper[4730]: I0930 10:00:49.678155 4730 patch_prober.go:28] interesting pod/observability-operator-cc5f78dfc-b68z8 container/operator namespace/openshift-operators: Readiness probe status=failure output="Get \"http://10.217.0.45:8081/healthz\": dial tcp 10.217.0.45:8081: connect: connection refused" start-of-body= Sep 30 10:00:49 crc kubenswrapper[4730]: I0930 10:00:49.678200 4730 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operators/observability-operator-cc5f78dfc-b68z8" podUID="2b9be7f9-7237-4a96-b0a3-9052ab5b0eea" containerName="operator" probeResult="failure" output="Get \"http://10.217.0.45:8081/healthz\": dial tcp 10.217.0.45:8081: connect: connection refused" Sep 30 10:00:49 crc kubenswrapper[4730]: I0930 10:00:49.693573 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/observability-operator-cc5f78dfc-b68z8" podStartSLOduration=1.208533152 podStartE2EDuration="38.693552725s" podCreationTimestamp="2025-09-30 10:00:11 +0000 UTC" firstStartedPulling="2025-09-30 10:00:12.009096929 +0000 UTC m=+656.342356932" lastFinishedPulling="2025-09-30 10:00:49.494116502 +0000 UTC m=+693.827376505" observedRunningTime="2025-09-30 10:00:49.693057492 +0000 UTC m=+694.026317495" watchObservedRunningTime="2025-09-30 10:00:49.693552725 +0000 UTC m=+694.026812718" Sep 30 10:00:49 crc kubenswrapper[4730]: I0930 10:00:49.696407 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-7c8cf85677-kv7bf" podStartSLOduration=2.925626185 podStartE2EDuration="39.696396669s" podCreationTimestamp="2025-09-30 10:00:10 +0000 UTC" firstStartedPulling="2025-09-30 10:00:11.546363238 +0000 UTC m=+655.879623231" lastFinishedPulling="2025-09-30 10:00:48.317133722 +0000 UTC m=+692.650393715" observedRunningTime="2025-09-30 10:00:48.693305022 +0000 UTC m=+693.026565015" watchObservedRunningTime="2025-09-30 10:00:49.696396669 +0000 UTC m=+694.029656662" Sep 30 10:00:50 crc kubenswrapper[4730]: I0930 10:00:50.682407 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators/observability-operator-cc5f78dfc-b68z8" Sep 30 10:01:02 crc kubenswrapper[4730]: I0930 10:01:02.337075 4730 patch_prober.go:28] interesting pod/machine-config-daemon-d4zf9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 10:01:02 crc kubenswrapper[4730]: I0930 10:01:02.337723 4730 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerName="machine-config-daemon" probeResult="failure" 
output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 10:01:07 crc kubenswrapper[4730]: I0930 10:01:07.516377 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcmkqnf"] Sep 30 10:01:07 crc kubenswrapper[4730]: I0930 10:01:07.517834 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcmkqnf" Sep 30 10:01:07 crc kubenswrapper[4730]: I0930 10:01:07.519532 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Sep 30 10:01:07 crc kubenswrapper[4730]: I0930 10:01:07.531922 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcmkqnf"] Sep 30 10:01:07 crc kubenswrapper[4730]: I0930 10:01:07.608082 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d8fd2777-1d8a-4947-a1c0-686fdc13c679-util\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcmkqnf\" (UID: \"d8fd2777-1d8a-4947-a1c0-686fdc13c679\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcmkqnf" Sep 30 10:01:07 crc kubenswrapper[4730]: I0930 10:01:07.608214 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d8fd2777-1d8a-4947-a1c0-686fdc13c679-bundle\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcmkqnf\" (UID: \"d8fd2777-1d8a-4947-a1c0-686fdc13c679\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcmkqnf" Sep 30 10:01:07 crc kubenswrapper[4730]: I0930 10:01:07.608295 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b62kr\" (UniqueName: \"kubernetes.io/projected/d8fd2777-1d8a-4947-a1c0-686fdc13c679-kube-api-access-b62kr\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcmkqnf\" (UID: \"d8fd2777-1d8a-4947-a1c0-686fdc13c679\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcmkqnf" Sep 30 10:01:07 crc kubenswrapper[4730]: I0930 10:01:07.709343 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d8fd2777-1d8a-4947-a1c0-686fdc13c679-bundle\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcmkqnf\" (UID: \"d8fd2777-1d8a-4947-a1c0-686fdc13c679\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcmkqnf" Sep 30 10:01:07 crc kubenswrapper[4730]: I0930 10:01:07.709729 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b62kr\" (UniqueName: \"kubernetes.io/projected/d8fd2777-1d8a-4947-a1c0-686fdc13c679-kube-api-access-b62kr\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcmkqnf\" (UID: \"d8fd2777-1d8a-4947-a1c0-686fdc13c679\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcmkqnf" Sep 30 10:01:07 crc kubenswrapper[4730]: I0930 10:01:07.709768 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d8fd2777-1d8a-4947-a1c0-686fdc13c679-util\") 
pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcmkqnf\" (UID: \"d8fd2777-1d8a-4947-a1c0-686fdc13c679\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcmkqnf" Sep 30 10:01:07 crc kubenswrapper[4730]: I0930 10:01:07.709818 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d8fd2777-1d8a-4947-a1c0-686fdc13c679-bundle\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcmkqnf\" (UID: \"d8fd2777-1d8a-4947-a1c0-686fdc13c679\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcmkqnf" Sep 30 10:01:07 crc kubenswrapper[4730]: I0930 10:01:07.710101 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d8fd2777-1d8a-4947-a1c0-686fdc13c679-util\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcmkqnf\" (UID: \"d8fd2777-1d8a-4947-a1c0-686fdc13c679\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcmkqnf" Sep 30 10:01:07 crc kubenswrapper[4730]: I0930 10:01:07.726248 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b62kr\" (UniqueName: \"kubernetes.io/projected/d8fd2777-1d8a-4947-a1c0-686fdc13c679-kube-api-access-b62kr\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcmkqnf\" (UID: \"d8fd2777-1d8a-4947-a1c0-686fdc13c679\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcmkqnf" Sep 30 10:01:07 crc kubenswrapper[4730]: I0930 10:01:07.833984 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcmkqnf" Sep 30 10:01:08 crc kubenswrapper[4730]: I0930 10:01:08.280967 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcmkqnf"] Sep 30 10:01:08 crc kubenswrapper[4730]: I0930 10:01:08.785102 4730 generic.go:334] "Generic (PLEG): container finished" podID="d8fd2777-1d8a-4947-a1c0-686fdc13c679" containerID="a59aa702b49d251d620611da6c546e7731703c7b32b18f2fc7e2839e91d9f927" exitCode=0 Sep 30 10:01:08 crc kubenswrapper[4730]: I0930 10:01:08.785161 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcmkqnf" event={"ID":"d8fd2777-1d8a-4947-a1c0-686fdc13c679","Type":"ContainerDied","Data":"a59aa702b49d251d620611da6c546e7731703c7b32b18f2fc7e2839e91d9f927"} Sep 30 10:01:08 crc kubenswrapper[4730]: I0930 10:01:08.785209 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcmkqnf" event={"ID":"d8fd2777-1d8a-4947-a1c0-686fdc13c679","Type":"ContainerStarted","Data":"5c7cbf0e778935c4d07e0e93f3afcf3d4b8d98146d9401c912e36e97cbd6057e"} Sep 30 10:01:10 crc kubenswrapper[4730]: I0930 10:01:10.796964 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcmkqnf" event={"ID":"d8fd2777-1d8a-4947-a1c0-686fdc13c679","Type":"ContainerStarted","Data":"91f38c25c220cc45a31310b3f9b35b4bd7a7f63f99c3f33846b219f177a5474a"} Sep 30 10:01:11 crc kubenswrapper[4730]: I0930 10:01:11.804582 4730 generic.go:334] "Generic (PLEG): container finished" podID="d8fd2777-1d8a-4947-a1c0-686fdc13c679" 
containerID="91f38c25c220cc45a31310b3f9b35b4bd7a7f63f99c3f33846b219f177a5474a" exitCode=0 Sep 30 10:01:11 crc kubenswrapper[4730]: I0930 10:01:11.804686 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcmkqnf" event={"ID":"d8fd2777-1d8a-4947-a1c0-686fdc13c679","Type":"ContainerDied","Data":"91f38c25c220cc45a31310b3f9b35b4bd7a7f63f99c3f33846b219f177a5474a"} Sep 30 10:01:12 crc kubenswrapper[4730]: I0930 10:01:12.811960 4730 generic.go:334] "Generic (PLEG): container finished" podID="d8fd2777-1d8a-4947-a1c0-686fdc13c679" containerID="afd917307be7e334ce52c0de8ee1f33bebe5922abb5d30dfab52065b8c28d5cd" exitCode=0 Sep 30 10:01:12 crc kubenswrapper[4730]: I0930 10:01:12.812021 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcmkqnf" event={"ID":"d8fd2777-1d8a-4947-a1c0-686fdc13c679","Type":"ContainerDied","Data":"afd917307be7e334ce52c0de8ee1f33bebe5922abb5d30dfab52065b8c28d5cd"} Sep 30 10:01:14 crc kubenswrapper[4730]: I0930 10:01:14.053535 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcmkqnf" Sep 30 10:01:14 crc kubenswrapper[4730]: I0930 10:01:14.200149 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d8fd2777-1d8a-4947-a1c0-686fdc13c679-util\") pod \"d8fd2777-1d8a-4947-a1c0-686fdc13c679\" (UID: \"d8fd2777-1d8a-4947-a1c0-686fdc13c679\") " Sep 30 10:01:14 crc kubenswrapper[4730]: I0930 10:01:14.200306 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d8fd2777-1d8a-4947-a1c0-686fdc13c679-bundle\") pod \"d8fd2777-1d8a-4947-a1c0-686fdc13c679\" (UID: \"d8fd2777-1d8a-4947-a1c0-686fdc13c679\") " Sep 30 10:01:14 crc kubenswrapper[4730]: I0930 10:01:14.200423 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b62kr\" (UniqueName: \"kubernetes.io/projected/d8fd2777-1d8a-4947-a1c0-686fdc13c679-kube-api-access-b62kr\") pod \"d8fd2777-1d8a-4947-a1c0-686fdc13c679\" (UID: \"d8fd2777-1d8a-4947-a1c0-686fdc13c679\") " Sep 30 10:01:14 crc kubenswrapper[4730]: I0930 10:01:14.202628 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d8fd2777-1d8a-4947-a1c0-686fdc13c679-bundle" (OuterVolumeSpecName: "bundle") pod "d8fd2777-1d8a-4947-a1c0-686fdc13c679" (UID: "d8fd2777-1d8a-4947-a1c0-686fdc13c679"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 10:01:14 crc kubenswrapper[4730]: I0930 10:01:14.208739 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d8fd2777-1d8a-4947-a1c0-686fdc13c679-kube-api-access-b62kr" (OuterVolumeSpecName: "kube-api-access-b62kr") pod "d8fd2777-1d8a-4947-a1c0-686fdc13c679" (UID: "d8fd2777-1d8a-4947-a1c0-686fdc13c679"). InnerVolumeSpecName "kube-api-access-b62kr". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:01:14 crc kubenswrapper[4730]: I0930 10:01:14.211540 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d8fd2777-1d8a-4947-a1c0-686fdc13c679-util" (OuterVolumeSpecName: "util") pod "d8fd2777-1d8a-4947-a1c0-686fdc13c679" (UID: "d8fd2777-1d8a-4947-a1c0-686fdc13c679"). 
InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 10:01:14 crc kubenswrapper[4730]: I0930 10:01:14.301527 4730 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d8fd2777-1d8a-4947-a1c0-686fdc13c679-util\") on node \"crc\" DevicePath \"\"" Sep 30 10:01:14 crc kubenswrapper[4730]: I0930 10:01:14.301576 4730 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d8fd2777-1d8a-4947-a1c0-686fdc13c679-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 10:01:14 crc kubenswrapper[4730]: I0930 10:01:14.301601 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b62kr\" (UniqueName: \"kubernetes.io/projected/d8fd2777-1d8a-4947-a1c0-686fdc13c679-kube-api-access-b62kr\") on node \"crc\" DevicePath \"\"" Sep 30 10:01:14 crc kubenswrapper[4730]: I0930 10:01:14.823831 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcmkqnf" event={"ID":"d8fd2777-1d8a-4947-a1c0-686fdc13c679","Type":"ContainerDied","Data":"5c7cbf0e778935c4d07e0e93f3afcf3d4b8d98146d9401c912e36e97cbd6057e"} Sep 30 10:01:14 crc kubenswrapper[4730]: I0930 10:01:14.823904 4730 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5c7cbf0e778935c4d07e0e93f3afcf3d4b8d98146d9401c912e36e97cbd6057e" Sep 30 10:01:14 crc kubenswrapper[4730]: I0930 10:01:14.823918 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcmkqnf" Sep 30 10:01:19 crc kubenswrapper[4730]: I0930 10:01:19.130662 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-operator-5d6f6cfd66-krpvk"] Sep 30 10:01:19 crc kubenswrapper[4730]: E0930 10:01:19.131259 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d8fd2777-1d8a-4947-a1c0-686fdc13c679" containerName="util" Sep 30 10:01:19 crc kubenswrapper[4730]: I0930 10:01:19.131276 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="d8fd2777-1d8a-4947-a1c0-686fdc13c679" containerName="util" Sep 30 10:01:19 crc kubenswrapper[4730]: E0930 10:01:19.131290 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d8fd2777-1d8a-4947-a1c0-686fdc13c679" containerName="pull" Sep 30 10:01:19 crc kubenswrapper[4730]: I0930 10:01:19.131298 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="d8fd2777-1d8a-4947-a1c0-686fdc13c679" containerName="pull" Sep 30 10:01:19 crc kubenswrapper[4730]: E0930 10:01:19.131309 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d8fd2777-1d8a-4947-a1c0-686fdc13c679" containerName="extract" Sep 30 10:01:19 crc kubenswrapper[4730]: I0930 10:01:19.131316 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="d8fd2777-1d8a-4947-a1c0-686fdc13c679" containerName="extract" Sep 30 10:01:19 crc kubenswrapper[4730]: I0930 10:01:19.131452 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="d8fd2777-1d8a-4947-a1c0-686fdc13c679" containerName="extract" Sep 30 10:01:19 crc kubenswrapper[4730]: I0930 10:01:19.131997 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-krpvk" Sep 30 10:01:19 crc kubenswrapper[4730]: I0930 10:01:19.135457 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"openshift-service-ca.crt" Sep 30 10:01:19 crc kubenswrapper[4730]: I0930 10:01:19.135754 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"kube-root-ca.crt" Sep 30 10:01:19 crc kubenswrapper[4730]: I0930 10:01:19.136320 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-operator-dockercfg-lcmb8" Sep 30 10:01:19 crc kubenswrapper[4730]: I0930 10:01:19.152479 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-5d6f6cfd66-krpvk"] Sep 30 10:01:19 crc kubenswrapper[4730]: I0930 10:01:19.278761 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d26qn\" (UniqueName: \"kubernetes.io/projected/4d5a580e-a60b-4854-838b-f51fb9e32536-kube-api-access-d26qn\") pod \"nmstate-operator-5d6f6cfd66-krpvk\" (UID: \"4d5a580e-a60b-4854-838b-f51fb9e32536\") " pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-krpvk" Sep 30 10:01:19 crc kubenswrapper[4730]: I0930 10:01:19.380057 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d26qn\" (UniqueName: \"kubernetes.io/projected/4d5a580e-a60b-4854-838b-f51fb9e32536-kube-api-access-d26qn\") pod \"nmstate-operator-5d6f6cfd66-krpvk\" (UID: \"4d5a580e-a60b-4854-838b-f51fb9e32536\") " pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-krpvk" Sep 30 10:01:19 crc kubenswrapper[4730]: I0930 10:01:19.403449 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d26qn\" (UniqueName: \"kubernetes.io/projected/4d5a580e-a60b-4854-838b-f51fb9e32536-kube-api-access-d26qn\") pod \"nmstate-operator-5d6f6cfd66-krpvk\" (UID: \"4d5a580e-a60b-4854-838b-f51fb9e32536\") " pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-krpvk" Sep 30 10:01:19 crc kubenswrapper[4730]: I0930 10:01:19.445724 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-krpvk" Sep 30 10:01:19 crc kubenswrapper[4730]: I0930 10:01:19.681869 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-5d6f6cfd66-krpvk"] Sep 30 10:01:19 crc kubenswrapper[4730]: I0930 10:01:19.853884 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-krpvk" event={"ID":"4d5a580e-a60b-4854-838b-f51fb9e32536","Type":"ContainerStarted","Data":"f31edc4b13ed6fdbed4fe0108e5c8f1e090422dc26f33f0a4b9efb5609536e75"} Sep 30 10:01:22 crc kubenswrapper[4730]: I0930 10:01:22.868536 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-krpvk" event={"ID":"4d5a580e-a60b-4854-838b-f51fb9e32536","Type":"ContainerStarted","Data":"8922d83d73590d5ecf8443878f72e23fe1d73cbb293512d448cee94144facdcd"} Sep 30 10:01:22 crc kubenswrapper[4730]: I0930 10:01:22.888083 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-krpvk" podStartSLOduration=0.996175424 podStartE2EDuration="3.888057991s" podCreationTimestamp="2025-09-30 10:01:19 +0000 UTC" firstStartedPulling="2025-09-30 10:01:19.701426997 +0000 UTC m=+724.034686990" lastFinishedPulling="2025-09-30 10:01:22.593309564 +0000 UTC m=+726.926569557" observedRunningTime="2025-09-30 10:01:22.887700361 +0000 UTC m=+727.220960364" watchObservedRunningTime="2025-09-30 10:01:22.888057991 +0000 UTC m=+727.221317984" Sep 30 10:01:28 crc kubenswrapper[4730]: I0930 10:01:28.346916 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-metrics-58fcddf996-5pc6s"] Sep 30 10:01:28 crc kubenswrapper[4730]: I0930 10:01:28.349309 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-58fcddf996-5pc6s" Sep 30 10:01:28 crc kubenswrapper[4730]: I0930 10:01:28.352178 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-handler-dockercfg-l4zwp" Sep 30 10:01:28 crc kubenswrapper[4730]: I0930 10:01:28.358869 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-58fcddf996-5pc6s"] Sep 30 10:01:28 crc kubenswrapper[4730]: I0930 10:01:28.374175 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-webhook-6d689559c5-6znsv"] Sep 30 10:01:28 crc kubenswrapper[4730]: I0930 10:01:28.375102 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-6d689559c5-6znsv" Sep 30 10:01:28 crc kubenswrapper[4730]: I0930 10:01:28.379582 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"openshift-nmstate-webhook" Sep 30 10:01:28 crc kubenswrapper[4730]: I0930 10:01:28.391050 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-handler-5j4jt"] Sep 30 10:01:28 crc kubenswrapper[4730]: I0930 10:01:28.392051 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-handler-5j4jt" Sep 30 10:01:28 crc kubenswrapper[4730]: I0930 10:01:28.397943 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-6d689559c5-6znsv"] Sep 30 10:01:28 crc kubenswrapper[4730]: I0930 10:01:28.499636 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/a4994298-b2c8-4c02-9196-58d0cd805da1-nmstate-lock\") pod \"nmstate-handler-5j4jt\" (UID: \"a4994298-b2c8-4c02-9196-58d0cd805da1\") " pod="openshift-nmstate/nmstate-handler-5j4jt" Sep 30 10:01:28 crc kubenswrapper[4730]: I0930 10:01:28.499699 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9xb9v\" (UniqueName: \"kubernetes.io/projected/05ca167b-1d36-4bd0-82f0-07b82f5e9a7d-kube-api-access-9xb9v\") pod \"nmstate-webhook-6d689559c5-6znsv\" (UID: \"05ca167b-1d36-4bd0-82f0-07b82f5e9a7d\") " pod="openshift-nmstate/nmstate-webhook-6d689559c5-6znsv" Sep 30 10:01:28 crc kubenswrapper[4730]: I0930 10:01:28.499721 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/a4994298-b2c8-4c02-9196-58d0cd805da1-dbus-socket\") pod \"nmstate-handler-5j4jt\" (UID: \"a4994298-b2c8-4c02-9196-58d0cd805da1\") " pod="openshift-nmstate/nmstate-handler-5j4jt" Sep 30 10:01:28 crc kubenswrapper[4730]: I0930 10:01:28.500186 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/a4994298-b2c8-4c02-9196-58d0cd805da1-ovs-socket\") pod \"nmstate-handler-5j4jt\" (UID: \"a4994298-b2c8-4c02-9196-58d0cd805da1\") " pod="openshift-nmstate/nmstate-handler-5j4jt" Sep 30 10:01:28 crc kubenswrapper[4730]: I0930 10:01:28.500287 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qrfwh\" (UniqueName: \"kubernetes.io/projected/a4994298-b2c8-4c02-9196-58d0cd805da1-kube-api-access-qrfwh\") pod \"nmstate-handler-5j4jt\" (UID: \"a4994298-b2c8-4c02-9196-58d0cd805da1\") " pod="openshift-nmstate/nmstate-handler-5j4jt" Sep 30 10:01:28 crc kubenswrapper[4730]: I0930 10:01:28.500326 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/05ca167b-1d36-4bd0-82f0-07b82f5e9a7d-tls-key-pair\") pod \"nmstate-webhook-6d689559c5-6znsv\" (UID: \"05ca167b-1d36-4bd0-82f0-07b82f5e9a7d\") " pod="openshift-nmstate/nmstate-webhook-6d689559c5-6znsv" Sep 30 10:01:28 crc kubenswrapper[4730]: I0930 10:01:28.500392 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5cbm2\" (UniqueName: \"kubernetes.io/projected/24231826-1571-4b73-ae50-bc95035399b2-kube-api-access-5cbm2\") pod \"nmstate-metrics-58fcddf996-5pc6s\" (UID: \"24231826-1571-4b73-ae50-bc95035399b2\") " pod="openshift-nmstate/nmstate-metrics-58fcddf996-5pc6s" Sep 30 10:01:28 crc kubenswrapper[4730]: I0930 10:01:28.518007 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-console-plugin-864bb6dfb5-2tzsp"] Sep 30 10:01:28 crc kubenswrapper[4730]: I0930 10:01:28.518739 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-2tzsp" Sep 30 10:01:28 crc kubenswrapper[4730]: I0930 10:01:28.523547 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"default-dockercfg-fxn9t" Sep 30 10:01:28 crc kubenswrapper[4730]: I0930 10:01:28.523548 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"plugin-serving-cert" Sep 30 10:01:28 crc kubenswrapper[4730]: I0930 10:01:28.523573 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"nginx-conf" Sep 30 10:01:28 crc kubenswrapper[4730]: I0930 10:01:28.527527 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-864bb6dfb5-2tzsp"] Sep 30 10:01:28 crc kubenswrapper[4730]: I0930 10:01:28.601696 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qrfwh\" (UniqueName: \"kubernetes.io/projected/a4994298-b2c8-4c02-9196-58d0cd805da1-kube-api-access-qrfwh\") pod \"nmstate-handler-5j4jt\" (UID: \"a4994298-b2c8-4c02-9196-58d0cd805da1\") " pod="openshift-nmstate/nmstate-handler-5j4jt" Sep 30 10:01:28 crc kubenswrapper[4730]: I0930 10:01:28.601749 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/05ca167b-1d36-4bd0-82f0-07b82f5e9a7d-tls-key-pair\") pod \"nmstate-webhook-6d689559c5-6znsv\" (UID: \"05ca167b-1d36-4bd0-82f0-07b82f5e9a7d\") " pod="openshift-nmstate/nmstate-webhook-6d689559c5-6znsv" Sep 30 10:01:28 crc kubenswrapper[4730]: I0930 10:01:28.601778 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5cbm2\" (UniqueName: \"kubernetes.io/projected/24231826-1571-4b73-ae50-bc95035399b2-kube-api-access-5cbm2\") pod \"nmstate-metrics-58fcddf996-5pc6s\" (UID: \"24231826-1571-4b73-ae50-bc95035399b2\") " pod="openshift-nmstate/nmstate-metrics-58fcddf996-5pc6s" Sep 30 10:01:28 crc kubenswrapper[4730]: I0930 10:01:28.601818 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/a4994298-b2c8-4c02-9196-58d0cd805da1-nmstate-lock\") pod \"nmstate-handler-5j4jt\" (UID: \"a4994298-b2c8-4c02-9196-58d0cd805da1\") " pod="openshift-nmstate/nmstate-handler-5j4jt" Sep 30 10:01:28 crc kubenswrapper[4730]: I0930 10:01:28.601841 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9xb9v\" (UniqueName: \"kubernetes.io/projected/05ca167b-1d36-4bd0-82f0-07b82f5e9a7d-kube-api-access-9xb9v\") pod \"nmstate-webhook-6d689559c5-6znsv\" (UID: \"05ca167b-1d36-4bd0-82f0-07b82f5e9a7d\") " pod="openshift-nmstate/nmstate-webhook-6d689559c5-6znsv" Sep 30 10:01:28 crc kubenswrapper[4730]: I0930 10:01:28.601858 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/a4994298-b2c8-4c02-9196-58d0cd805da1-dbus-socket\") pod \"nmstate-handler-5j4jt\" (UID: \"a4994298-b2c8-4c02-9196-58d0cd805da1\") " pod="openshift-nmstate/nmstate-handler-5j4jt" Sep 30 10:01:28 crc kubenswrapper[4730]: I0930 10:01:28.601885 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zkbv2\" (UniqueName: \"kubernetes.io/projected/63a29940-b6e3-47cf-b71b-f010806ae889-kube-api-access-zkbv2\") pod \"nmstate-console-plugin-864bb6dfb5-2tzsp\" (UID: 
\"63a29940-b6e3-47cf-b71b-f010806ae889\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-2tzsp" Sep 30 10:01:28 crc kubenswrapper[4730]: I0930 10:01:28.601910 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/63a29940-b6e3-47cf-b71b-f010806ae889-plugin-serving-cert\") pod \"nmstate-console-plugin-864bb6dfb5-2tzsp\" (UID: \"63a29940-b6e3-47cf-b71b-f010806ae889\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-2tzsp" Sep 30 10:01:28 crc kubenswrapper[4730]: I0930 10:01:28.601941 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/63a29940-b6e3-47cf-b71b-f010806ae889-nginx-conf\") pod \"nmstate-console-plugin-864bb6dfb5-2tzsp\" (UID: \"63a29940-b6e3-47cf-b71b-f010806ae889\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-2tzsp" Sep 30 10:01:28 crc kubenswrapper[4730]: I0930 10:01:28.601959 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/a4994298-b2c8-4c02-9196-58d0cd805da1-ovs-socket\") pod \"nmstate-handler-5j4jt\" (UID: \"a4994298-b2c8-4c02-9196-58d0cd805da1\") " pod="openshift-nmstate/nmstate-handler-5j4jt" Sep 30 10:01:28 crc kubenswrapper[4730]: I0930 10:01:28.602032 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/a4994298-b2c8-4c02-9196-58d0cd805da1-ovs-socket\") pod \"nmstate-handler-5j4jt\" (UID: \"a4994298-b2c8-4c02-9196-58d0cd805da1\") " pod="openshift-nmstate/nmstate-handler-5j4jt" Sep 30 10:01:28 crc kubenswrapper[4730]: I0930 10:01:28.602373 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/a4994298-b2c8-4c02-9196-58d0cd805da1-nmstate-lock\") pod \"nmstate-handler-5j4jt\" (UID: \"a4994298-b2c8-4c02-9196-58d0cd805da1\") " pod="openshift-nmstate/nmstate-handler-5j4jt" Sep 30 10:01:28 crc kubenswrapper[4730]: I0930 10:01:28.602425 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/a4994298-b2c8-4c02-9196-58d0cd805da1-dbus-socket\") pod \"nmstate-handler-5j4jt\" (UID: \"a4994298-b2c8-4c02-9196-58d0cd805da1\") " pod="openshift-nmstate/nmstate-handler-5j4jt" Sep 30 10:01:28 crc kubenswrapper[4730]: I0930 10:01:28.608777 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/05ca167b-1d36-4bd0-82f0-07b82f5e9a7d-tls-key-pair\") pod \"nmstate-webhook-6d689559c5-6znsv\" (UID: \"05ca167b-1d36-4bd0-82f0-07b82f5e9a7d\") " pod="openshift-nmstate/nmstate-webhook-6d689559c5-6znsv" Sep 30 10:01:28 crc kubenswrapper[4730]: I0930 10:01:28.621481 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9xb9v\" (UniqueName: \"kubernetes.io/projected/05ca167b-1d36-4bd0-82f0-07b82f5e9a7d-kube-api-access-9xb9v\") pod \"nmstate-webhook-6d689559c5-6znsv\" (UID: \"05ca167b-1d36-4bd0-82f0-07b82f5e9a7d\") " pod="openshift-nmstate/nmstate-webhook-6d689559c5-6znsv" Sep 30 10:01:28 crc kubenswrapper[4730]: I0930 10:01:28.627177 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5cbm2\" (UniqueName: \"kubernetes.io/projected/24231826-1571-4b73-ae50-bc95035399b2-kube-api-access-5cbm2\") pod 
\"nmstate-metrics-58fcddf996-5pc6s\" (UID: \"24231826-1571-4b73-ae50-bc95035399b2\") " pod="openshift-nmstate/nmstate-metrics-58fcddf996-5pc6s" Sep 30 10:01:28 crc kubenswrapper[4730]: I0930 10:01:28.633218 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qrfwh\" (UniqueName: \"kubernetes.io/projected/a4994298-b2c8-4c02-9196-58d0cd805da1-kube-api-access-qrfwh\") pod \"nmstate-handler-5j4jt\" (UID: \"a4994298-b2c8-4c02-9196-58d0cd805da1\") " pod="openshift-nmstate/nmstate-handler-5j4jt" Sep 30 10:01:28 crc kubenswrapper[4730]: I0930 10:01:28.666782 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-58fcddf996-5pc6s" Sep 30 10:01:28 crc kubenswrapper[4730]: I0930 10:01:28.692952 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-6d689559c5-6znsv" Sep 30 10:01:28 crc kubenswrapper[4730]: I0930 10:01:28.703268 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zkbv2\" (UniqueName: \"kubernetes.io/projected/63a29940-b6e3-47cf-b71b-f010806ae889-kube-api-access-zkbv2\") pod \"nmstate-console-plugin-864bb6dfb5-2tzsp\" (UID: \"63a29940-b6e3-47cf-b71b-f010806ae889\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-2tzsp" Sep 30 10:01:28 crc kubenswrapper[4730]: I0930 10:01:28.703323 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/63a29940-b6e3-47cf-b71b-f010806ae889-plugin-serving-cert\") pod \"nmstate-console-plugin-864bb6dfb5-2tzsp\" (UID: \"63a29940-b6e3-47cf-b71b-f010806ae889\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-2tzsp" Sep 30 10:01:28 crc kubenswrapper[4730]: I0930 10:01:28.703374 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/63a29940-b6e3-47cf-b71b-f010806ae889-nginx-conf\") pod \"nmstate-console-plugin-864bb6dfb5-2tzsp\" (UID: \"63a29940-b6e3-47cf-b71b-f010806ae889\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-2tzsp" Sep 30 10:01:28 crc kubenswrapper[4730]: E0930 10:01:28.703891 4730 secret.go:188] Couldn't get secret openshift-nmstate/plugin-serving-cert: secret "plugin-serving-cert" not found Sep 30 10:01:28 crc kubenswrapper[4730]: E0930 10:01:28.703961 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/63a29940-b6e3-47cf-b71b-f010806ae889-plugin-serving-cert podName:63a29940-b6e3-47cf-b71b-f010806ae889 nodeName:}" failed. No retries permitted until 2025-09-30 10:01:29.203941095 +0000 UTC m=+733.537201088 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "plugin-serving-cert" (UniqueName: "kubernetes.io/secret/63a29940-b6e3-47cf-b71b-f010806ae889-plugin-serving-cert") pod "nmstate-console-plugin-864bb6dfb5-2tzsp" (UID: "63a29940-b6e3-47cf-b71b-f010806ae889") : secret "plugin-serving-cert" not found Sep 30 10:01:28 crc kubenswrapper[4730]: I0930 10:01:28.704409 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/63a29940-b6e3-47cf-b71b-f010806ae889-nginx-conf\") pod \"nmstate-console-plugin-864bb6dfb5-2tzsp\" (UID: \"63a29940-b6e3-47cf-b71b-f010806ae889\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-2tzsp" Sep 30 10:01:28 crc kubenswrapper[4730]: I0930 10:01:28.706760 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-5j4jt" Sep 30 10:01:28 crc kubenswrapper[4730]: I0930 10:01:28.730011 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zkbv2\" (UniqueName: \"kubernetes.io/projected/63a29940-b6e3-47cf-b71b-f010806ae889-kube-api-access-zkbv2\") pod \"nmstate-console-plugin-864bb6dfb5-2tzsp\" (UID: \"63a29940-b6e3-47cf-b71b-f010806ae889\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-2tzsp" Sep 30 10:01:28 crc kubenswrapper[4730]: I0930 10:01:28.739759 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-6d9f495d8c-xc4gv"] Sep 30 10:01:28 crc kubenswrapper[4730]: I0930 10:01:28.740847 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-6d9f495d8c-xc4gv" Sep 30 10:01:28 crc kubenswrapper[4730]: I0930 10:01:28.755764 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-6d9f495d8c-xc4gv"] Sep 30 10:01:28 crc kubenswrapper[4730]: I0930 10:01:28.805071 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/dfb860b9-181c-4a77-8e8f-23307100a892-trusted-ca-bundle\") pod \"console-6d9f495d8c-xc4gv\" (UID: \"dfb860b9-181c-4a77-8e8f-23307100a892\") " pod="openshift-console/console-6d9f495d8c-xc4gv" Sep 30 10:01:28 crc kubenswrapper[4730]: I0930 10:01:28.805157 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/dfb860b9-181c-4a77-8e8f-23307100a892-console-serving-cert\") pod \"console-6d9f495d8c-xc4gv\" (UID: \"dfb860b9-181c-4a77-8e8f-23307100a892\") " pod="openshift-console/console-6d9f495d8c-xc4gv" Sep 30 10:01:28 crc kubenswrapper[4730]: I0930 10:01:28.805222 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/dfb860b9-181c-4a77-8e8f-23307100a892-console-config\") pod \"console-6d9f495d8c-xc4gv\" (UID: \"dfb860b9-181c-4a77-8e8f-23307100a892\") " pod="openshift-console/console-6d9f495d8c-xc4gv" Sep 30 10:01:28 crc kubenswrapper[4730]: I0930 10:01:28.805243 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/dfb860b9-181c-4a77-8e8f-23307100a892-oauth-serving-cert\") pod \"console-6d9f495d8c-xc4gv\" (UID: \"dfb860b9-181c-4a77-8e8f-23307100a892\") " pod="openshift-console/console-6d9f495d8c-xc4gv" Sep 30 10:01:28 crc kubenswrapper[4730]: I0930 10:01:28.805264 4730 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q9vj5\" (UniqueName: \"kubernetes.io/projected/dfb860b9-181c-4a77-8e8f-23307100a892-kube-api-access-q9vj5\") pod \"console-6d9f495d8c-xc4gv\" (UID: \"dfb860b9-181c-4a77-8e8f-23307100a892\") " pod="openshift-console/console-6d9f495d8c-xc4gv" Sep 30 10:01:28 crc kubenswrapper[4730]: I0930 10:01:28.805281 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/dfb860b9-181c-4a77-8e8f-23307100a892-console-oauth-config\") pod \"console-6d9f495d8c-xc4gv\" (UID: \"dfb860b9-181c-4a77-8e8f-23307100a892\") " pod="openshift-console/console-6d9f495d8c-xc4gv" Sep 30 10:01:28 crc kubenswrapper[4730]: I0930 10:01:28.805298 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/dfb860b9-181c-4a77-8e8f-23307100a892-service-ca\") pod \"console-6d9f495d8c-xc4gv\" (UID: \"dfb860b9-181c-4a77-8e8f-23307100a892\") " pod="openshift-console/console-6d9f495d8c-xc4gv" Sep 30 10:01:28 crc kubenswrapper[4730]: I0930 10:01:28.906024 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/dfb860b9-181c-4a77-8e8f-23307100a892-trusted-ca-bundle\") pod \"console-6d9f495d8c-xc4gv\" (UID: \"dfb860b9-181c-4a77-8e8f-23307100a892\") " pod="openshift-console/console-6d9f495d8c-xc4gv" Sep 30 10:01:28 crc kubenswrapper[4730]: I0930 10:01:28.906110 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/dfb860b9-181c-4a77-8e8f-23307100a892-console-serving-cert\") pod \"console-6d9f495d8c-xc4gv\" (UID: \"dfb860b9-181c-4a77-8e8f-23307100a892\") " pod="openshift-console/console-6d9f495d8c-xc4gv" Sep 30 10:01:28 crc kubenswrapper[4730]: I0930 10:01:28.906143 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-5j4jt" event={"ID":"a4994298-b2c8-4c02-9196-58d0cd805da1","Type":"ContainerStarted","Data":"a1f8e98ab2c2897a6e1ec5db8c039230c48690eb2206420331cbf2d3082296f8"} Sep 30 10:01:28 crc kubenswrapper[4730]: I0930 10:01:28.906154 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/dfb860b9-181c-4a77-8e8f-23307100a892-console-config\") pod \"console-6d9f495d8c-xc4gv\" (UID: \"dfb860b9-181c-4a77-8e8f-23307100a892\") " pod="openshift-console/console-6d9f495d8c-xc4gv" Sep 30 10:01:28 crc kubenswrapper[4730]: I0930 10:01:28.906298 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/dfb860b9-181c-4a77-8e8f-23307100a892-oauth-serving-cert\") pod \"console-6d9f495d8c-xc4gv\" (UID: \"dfb860b9-181c-4a77-8e8f-23307100a892\") " pod="openshift-console/console-6d9f495d8c-xc4gv" Sep 30 10:01:28 crc kubenswrapper[4730]: I0930 10:01:28.906386 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q9vj5\" (UniqueName: \"kubernetes.io/projected/dfb860b9-181c-4a77-8e8f-23307100a892-kube-api-access-q9vj5\") pod \"console-6d9f495d8c-xc4gv\" (UID: \"dfb860b9-181c-4a77-8e8f-23307100a892\") " pod="openshift-console/console-6d9f495d8c-xc4gv" Sep 30 10:01:28 crc kubenswrapper[4730]: I0930 10:01:28.906440 4730 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/dfb860b9-181c-4a77-8e8f-23307100a892-console-oauth-config\") pod \"console-6d9f495d8c-xc4gv\" (UID: \"dfb860b9-181c-4a77-8e8f-23307100a892\") " pod="openshift-console/console-6d9f495d8c-xc4gv" Sep 30 10:01:28 crc kubenswrapper[4730]: I0930 10:01:28.906507 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/dfb860b9-181c-4a77-8e8f-23307100a892-service-ca\") pod \"console-6d9f495d8c-xc4gv\" (UID: \"dfb860b9-181c-4a77-8e8f-23307100a892\") " pod="openshift-console/console-6d9f495d8c-xc4gv" Sep 30 10:01:28 crc kubenswrapper[4730]: I0930 10:01:28.907116 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/dfb860b9-181c-4a77-8e8f-23307100a892-console-config\") pod \"console-6d9f495d8c-xc4gv\" (UID: \"dfb860b9-181c-4a77-8e8f-23307100a892\") " pod="openshift-console/console-6d9f495d8c-xc4gv" Sep 30 10:01:28 crc kubenswrapper[4730]: I0930 10:01:28.907951 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/dfb860b9-181c-4a77-8e8f-23307100a892-oauth-serving-cert\") pod \"console-6d9f495d8c-xc4gv\" (UID: \"dfb860b9-181c-4a77-8e8f-23307100a892\") " pod="openshift-console/console-6d9f495d8c-xc4gv" Sep 30 10:01:28 crc kubenswrapper[4730]: I0930 10:01:28.907998 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/dfb860b9-181c-4a77-8e8f-23307100a892-service-ca\") pod \"console-6d9f495d8c-xc4gv\" (UID: \"dfb860b9-181c-4a77-8e8f-23307100a892\") " pod="openshift-console/console-6d9f495d8c-xc4gv" Sep 30 10:01:28 crc kubenswrapper[4730]: I0930 10:01:28.908880 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/dfb860b9-181c-4a77-8e8f-23307100a892-trusted-ca-bundle\") pod \"console-6d9f495d8c-xc4gv\" (UID: \"dfb860b9-181c-4a77-8e8f-23307100a892\") " pod="openshift-console/console-6d9f495d8c-xc4gv" Sep 30 10:01:28 crc kubenswrapper[4730]: I0930 10:01:28.911017 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/dfb860b9-181c-4a77-8e8f-23307100a892-console-serving-cert\") pod \"console-6d9f495d8c-xc4gv\" (UID: \"dfb860b9-181c-4a77-8e8f-23307100a892\") " pod="openshift-console/console-6d9f495d8c-xc4gv" Sep 30 10:01:28 crc kubenswrapper[4730]: I0930 10:01:28.911031 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/dfb860b9-181c-4a77-8e8f-23307100a892-console-oauth-config\") pod \"console-6d9f495d8c-xc4gv\" (UID: \"dfb860b9-181c-4a77-8e8f-23307100a892\") " pod="openshift-console/console-6d9f495d8c-xc4gv" Sep 30 10:01:28 crc kubenswrapper[4730]: I0930 10:01:28.926345 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q9vj5\" (UniqueName: \"kubernetes.io/projected/dfb860b9-181c-4a77-8e8f-23307100a892-kube-api-access-q9vj5\") pod \"console-6d9f495d8c-xc4gv\" (UID: \"dfb860b9-181c-4a77-8e8f-23307100a892\") " pod="openshift-console/console-6d9f495d8c-xc4gv" Sep 30 10:01:29 crc kubenswrapper[4730]: I0930 10:01:29.076512 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-6d9f495d8c-xc4gv" Sep 30 10:01:29 crc kubenswrapper[4730]: I0930 10:01:29.139882 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-6d689559c5-6znsv"] Sep 30 10:01:29 crc kubenswrapper[4730]: W0930 10:01:29.145162 4730 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod05ca167b_1d36_4bd0_82f0_07b82f5e9a7d.slice/crio-a1f72c1be79cb98c29a86ad6b90327e63c310ba5ca31e24aaf8130a335c3837c WatchSource:0}: Error finding container a1f72c1be79cb98c29a86ad6b90327e63c310ba5ca31e24aaf8130a335c3837c: Status 404 returned error can't find the container with id a1f72c1be79cb98c29a86ad6b90327e63c310ba5ca31e24aaf8130a335c3837c Sep 30 10:01:29 crc kubenswrapper[4730]: I0930 10:01:29.208842 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-58fcddf996-5pc6s"] Sep 30 10:01:29 crc kubenswrapper[4730]: I0930 10:01:29.210870 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/63a29940-b6e3-47cf-b71b-f010806ae889-plugin-serving-cert\") pod \"nmstate-console-plugin-864bb6dfb5-2tzsp\" (UID: \"63a29940-b6e3-47cf-b71b-f010806ae889\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-2tzsp" Sep 30 10:01:29 crc kubenswrapper[4730]: W0930 10:01:29.214752 4730 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod24231826_1571_4b73_ae50_bc95035399b2.slice/crio-74b53a23c1b244fd6f6d6591be0b6469935468d29d39419e748dc6102682fbed WatchSource:0}: Error finding container 74b53a23c1b244fd6f6d6591be0b6469935468d29d39419e748dc6102682fbed: Status 404 returned error can't find the container with id 74b53a23c1b244fd6f6d6591be0b6469935468d29d39419e748dc6102682fbed Sep 30 10:01:29 crc kubenswrapper[4730]: I0930 10:01:29.216531 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/63a29940-b6e3-47cf-b71b-f010806ae889-plugin-serving-cert\") pod \"nmstate-console-plugin-864bb6dfb5-2tzsp\" (UID: \"63a29940-b6e3-47cf-b71b-f010806ae889\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-2tzsp" Sep 30 10:01:29 crc kubenswrapper[4730]: I0930 10:01:29.433226 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-2tzsp" Sep 30 10:01:29 crc kubenswrapper[4730]: I0930 10:01:29.507240 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-6d9f495d8c-xc4gv"] Sep 30 10:01:29 crc kubenswrapper[4730]: W0930 10:01:29.520513 4730 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddfb860b9_181c_4a77_8e8f_23307100a892.slice/crio-f1d881a3507f528521cb959b95957e593a1a17eebdb7e2c509b0067706da759c WatchSource:0}: Error finding container f1d881a3507f528521cb959b95957e593a1a17eebdb7e2c509b0067706da759c: Status 404 returned error can't find the container with id f1d881a3507f528521cb959b95957e593a1a17eebdb7e2c509b0067706da759c Sep 30 10:01:29 crc kubenswrapper[4730]: I0930 10:01:29.713115 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-864bb6dfb5-2tzsp"] Sep 30 10:01:29 crc kubenswrapper[4730]: I0930 10:01:29.913197 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-6d689559c5-6znsv" event={"ID":"05ca167b-1d36-4bd0-82f0-07b82f5e9a7d","Type":"ContainerStarted","Data":"a1f72c1be79cb98c29a86ad6b90327e63c310ba5ca31e24aaf8130a335c3837c"} Sep 30 10:01:29 crc kubenswrapper[4730]: I0930 10:01:29.914632 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-6d9f495d8c-xc4gv" event={"ID":"dfb860b9-181c-4a77-8e8f-23307100a892","Type":"ContainerStarted","Data":"e2fe28d2e177af9867c78d0adab1dd986469f4de0910618b9faf7176d8ab77da"} Sep 30 10:01:29 crc kubenswrapper[4730]: I0930 10:01:29.914876 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-6d9f495d8c-xc4gv" event={"ID":"dfb860b9-181c-4a77-8e8f-23307100a892","Type":"ContainerStarted","Data":"f1d881a3507f528521cb959b95957e593a1a17eebdb7e2c509b0067706da759c"} Sep 30 10:01:29 crc kubenswrapper[4730]: I0930 10:01:29.915833 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-2tzsp" event={"ID":"63a29940-b6e3-47cf-b71b-f010806ae889","Type":"ContainerStarted","Data":"cf90728980ad8906612fc0ce365b3349bc53204e468ed4db04b2d579477563ea"} Sep 30 10:01:29 crc kubenswrapper[4730]: I0930 10:01:29.916756 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-58fcddf996-5pc6s" event={"ID":"24231826-1571-4b73-ae50-bc95035399b2","Type":"ContainerStarted","Data":"74b53a23c1b244fd6f6d6591be0b6469935468d29d39419e748dc6102682fbed"} Sep 30 10:01:29 crc kubenswrapper[4730]: I0930 10:01:29.934735 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-6d9f495d8c-xc4gv" podStartSLOduration=1.9347161019999999 podStartE2EDuration="1.934716102s" podCreationTimestamp="2025-09-30 10:01:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 10:01:29.931934848 +0000 UTC m=+734.265194841" watchObservedRunningTime="2025-09-30 10:01:29.934716102 +0000 UTC m=+734.267976095" Sep 30 10:01:32 crc kubenswrapper[4730]: I0930 10:01:32.336933 4730 patch_prober.go:28] interesting pod/machine-config-daemon-d4zf9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 10:01:32 
crc kubenswrapper[4730]: I0930 10:01:32.337279 4730 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 10:01:32 crc kubenswrapper[4730]: I0930 10:01:32.934933 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-58fcddf996-5pc6s" event={"ID":"24231826-1571-4b73-ae50-bc95035399b2","Type":"ContainerStarted","Data":"9b8fb61840b930e15f2b8059d018509fb408d2f9286d60b632daa6359bd72494"} Sep 30 10:01:32 crc kubenswrapper[4730]: I0930 10:01:32.936174 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-5j4jt" event={"ID":"a4994298-b2c8-4c02-9196-58d0cd805da1","Type":"ContainerStarted","Data":"38e253711e27496517c71bd8023f2817480f2c4159fca1496092ee97b8718c33"} Sep 30 10:01:32 crc kubenswrapper[4730]: I0930 10:01:32.936308 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-handler-5j4jt" Sep 30 10:01:32 crc kubenswrapper[4730]: I0930 10:01:32.937367 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-6d689559c5-6znsv" event={"ID":"05ca167b-1d36-4bd0-82f0-07b82f5e9a7d","Type":"ContainerStarted","Data":"ffc305ed768fa8216f47905d009cab909a28ebb5c6d65270242ee4136fb70069"} Sep 30 10:01:32 crc kubenswrapper[4730]: I0930 10:01:32.937530 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-webhook-6d689559c5-6znsv" Sep 30 10:01:32 crc kubenswrapper[4730]: I0930 10:01:32.941231 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-2tzsp" event={"ID":"63a29940-b6e3-47cf-b71b-f010806ae889","Type":"ContainerStarted","Data":"bf844d4380b8660c1d6269787a20f6239981a4cd363db81328e568091eca39b3"} Sep 30 10:01:32 crc kubenswrapper[4730]: I0930 10:01:32.953327 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-handler-5j4jt" podStartSLOduration=1.510927761 podStartE2EDuration="4.953307571s" podCreationTimestamp="2025-09-30 10:01:28 +0000 UTC" firstStartedPulling="2025-09-30 10:01:28.755818607 +0000 UTC m=+733.089078600" lastFinishedPulling="2025-09-30 10:01:32.198198417 +0000 UTC m=+736.531458410" observedRunningTime="2025-09-30 10:01:32.953208268 +0000 UTC m=+737.286468281" watchObservedRunningTime="2025-09-30 10:01:32.953307571 +0000 UTC m=+737.286567564" Sep 30 10:01:32 crc kubenswrapper[4730]: I0930 10:01:32.968652 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-2tzsp" podStartSLOduration=2.496148493 podStartE2EDuration="4.968629226s" podCreationTimestamp="2025-09-30 10:01:28 +0000 UTC" firstStartedPulling="2025-09-30 10:01:29.725703513 +0000 UTC m=+734.058963506" lastFinishedPulling="2025-09-30 10:01:32.198184246 +0000 UTC m=+736.531444239" observedRunningTime="2025-09-30 10:01:32.966073039 +0000 UTC m=+737.299333042" watchObservedRunningTime="2025-09-30 10:01:32.968629226 +0000 UTC m=+737.301889229" Sep 30 10:01:32 crc kubenswrapper[4730]: I0930 10:01:32.991945 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-webhook-6d689559c5-6znsv" podStartSLOduration=1.9157928910000002 
podStartE2EDuration="4.991922673s" podCreationTimestamp="2025-09-30 10:01:28 +0000 UTC" firstStartedPulling="2025-09-30 10:01:29.149533831 +0000 UTC m=+733.482793824" lastFinishedPulling="2025-09-30 10:01:32.225663613 +0000 UTC m=+736.558923606" observedRunningTime="2025-09-30 10:01:32.989366595 +0000 UTC m=+737.322626588" watchObservedRunningTime="2025-09-30 10:01:32.991922673 +0000 UTC m=+737.325182656" Sep 30 10:01:34 crc kubenswrapper[4730]: I0930 10:01:34.958824 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-58fcddf996-5pc6s" event={"ID":"24231826-1571-4b73-ae50-bc95035399b2","Type":"ContainerStarted","Data":"5e72a61aad5f5c2ce21d376eaab47be2c7809caf9aceaa01984886affa02e782"} Sep 30 10:01:34 crc kubenswrapper[4730]: I0930 10:01:34.975078 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-metrics-58fcddf996-5pc6s" podStartSLOduration=1.7512031270000001 podStartE2EDuration="6.975056541s" podCreationTimestamp="2025-09-30 10:01:28 +0000 UTC" firstStartedPulling="2025-09-30 10:01:29.215910238 +0000 UTC m=+733.549170231" lastFinishedPulling="2025-09-30 10:01:34.439763652 +0000 UTC m=+738.773023645" observedRunningTime="2025-09-30 10:01:34.970633164 +0000 UTC m=+739.303893167" watchObservedRunningTime="2025-09-30 10:01:34.975056541 +0000 UTC m=+739.308316534" Sep 30 10:01:38 crc kubenswrapper[4730]: I0930 10:01:38.729297 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-handler-5j4jt" Sep 30 10:01:39 crc kubenswrapper[4730]: I0930 10:01:39.077582 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-6d9f495d8c-xc4gv" Sep 30 10:01:39 crc kubenswrapper[4730]: I0930 10:01:39.077895 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-6d9f495d8c-xc4gv" Sep 30 10:01:39 crc kubenswrapper[4730]: I0930 10:01:39.083179 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-6d9f495d8c-xc4gv" Sep 30 10:01:40 crc kubenswrapper[4730]: I0930 10:01:40.004969 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-6d9f495d8c-xc4gv" Sep 30 10:01:40 crc kubenswrapper[4730]: I0930 10:01:40.058097 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-md87h"] Sep 30 10:01:43 crc kubenswrapper[4730]: I0930 10:01:43.018170 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-df648"] Sep 30 10:01:43 crc kubenswrapper[4730]: I0930 10:01:43.018714 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-df648" podUID="2cf85f90-a707-4cbf-9cea-472b1109692d" containerName="controller-manager" containerID="cri-o://87d1080c0946298811ddf6ccfb942c971dbcbe7efa93ff6436bac7476cd24528" gracePeriod=30 Sep 30 10:01:43 crc kubenswrapper[4730]: I0930 10:01:43.132577 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-cljtg"] Sep 30 10:01:43 crc kubenswrapper[4730]: I0930 10:01:43.132832 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-cljtg" podUID="f17dee56-592e-45e7-8c4f-80854757d254" 
containerName="route-controller-manager" containerID="cri-o://62f11e9dd36c905303831657d65e23490b04c6ff4753450b5fac2d04c02f21e2" gracePeriod=30 Sep 30 10:01:43 crc kubenswrapper[4730]: I0930 10:01:43.443441 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-df648" Sep 30 10:01:43 crc kubenswrapper[4730]: I0930 10:01:43.507199 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2cf85f90-a707-4cbf-9cea-472b1109692d-serving-cert\") pod \"2cf85f90-a707-4cbf-9cea-472b1109692d\" (UID: \"2cf85f90-a707-4cbf-9cea-472b1109692d\") " Sep 30 10:01:43 crc kubenswrapper[4730]: I0930 10:01:43.507262 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/2cf85f90-a707-4cbf-9cea-472b1109692d-client-ca\") pod \"2cf85f90-a707-4cbf-9cea-472b1109692d\" (UID: \"2cf85f90-a707-4cbf-9cea-472b1109692d\") " Sep 30 10:01:43 crc kubenswrapper[4730]: I0930 10:01:43.507343 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2cf85f90-a707-4cbf-9cea-472b1109692d-config\") pod \"2cf85f90-a707-4cbf-9cea-472b1109692d\" (UID: \"2cf85f90-a707-4cbf-9cea-472b1109692d\") " Sep 30 10:01:43 crc kubenswrapper[4730]: I0930 10:01:43.507385 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/2cf85f90-a707-4cbf-9cea-472b1109692d-proxy-ca-bundles\") pod \"2cf85f90-a707-4cbf-9cea-472b1109692d\" (UID: \"2cf85f90-a707-4cbf-9cea-472b1109692d\") " Sep 30 10:01:43 crc kubenswrapper[4730]: I0930 10:01:43.507446 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h5vf7\" (UniqueName: \"kubernetes.io/projected/2cf85f90-a707-4cbf-9cea-472b1109692d-kube-api-access-h5vf7\") pod \"2cf85f90-a707-4cbf-9cea-472b1109692d\" (UID: \"2cf85f90-a707-4cbf-9cea-472b1109692d\") " Sep 30 10:01:43 crc kubenswrapper[4730]: I0930 10:01:43.509309 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2cf85f90-a707-4cbf-9cea-472b1109692d-config" (OuterVolumeSpecName: "config") pod "2cf85f90-a707-4cbf-9cea-472b1109692d" (UID: "2cf85f90-a707-4cbf-9cea-472b1109692d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 10:01:43 crc kubenswrapper[4730]: I0930 10:01:43.509585 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2cf85f90-a707-4cbf-9cea-472b1109692d-client-ca" (OuterVolumeSpecName: "client-ca") pod "2cf85f90-a707-4cbf-9cea-472b1109692d" (UID: "2cf85f90-a707-4cbf-9cea-472b1109692d"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 10:01:43 crc kubenswrapper[4730]: I0930 10:01:43.509866 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2cf85f90-a707-4cbf-9cea-472b1109692d-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "2cf85f90-a707-4cbf-9cea-472b1109692d" (UID: "2cf85f90-a707-4cbf-9cea-472b1109692d"). InnerVolumeSpecName "proxy-ca-bundles". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 10:01:43 crc kubenswrapper[4730]: I0930 10:01:43.515968 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2cf85f90-a707-4cbf-9cea-472b1109692d-kube-api-access-h5vf7" (OuterVolumeSpecName: "kube-api-access-h5vf7") pod "2cf85f90-a707-4cbf-9cea-472b1109692d" (UID: "2cf85f90-a707-4cbf-9cea-472b1109692d"). InnerVolumeSpecName "kube-api-access-h5vf7". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:01:43 crc kubenswrapper[4730]: I0930 10:01:43.516547 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2cf85f90-a707-4cbf-9cea-472b1109692d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "2cf85f90-a707-4cbf-9cea-472b1109692d" (UID: "2cf85f90-a707-4cbf-9cea-472b1109692d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:01:43 crc kubenswrapper[4730]: I0930 10:01:43.530475 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-cljtg" Sep 30 10:01:43 crc kubenswrapper[4730]: I0930 10:01:43.608687 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f17dee56-592e-45e7-8c4f-80854757d254-serving-cert\") pod \"f17dee56-592e-45e7-8c4f-80854757d254\" (UID: \"f17dee56-592e-45e7-8c4f-80854757d254\") " Sep 30 10:01:43 crc kubenswrapper[4730]: I0930 10:01:43.608869 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rbb94\" (UniqueName: \"kubernetes.io/projected/f17dee56-592e-45e7-8c4f-80854757d254-kube-api-access-rbb94\") pod \"f17dee56-592e-45e7-8c4f-80854757d254\" (UID: \"f17dee56-592e-45e7-8c4f-80854757d254\") " Sep 30 10:01:43 crc kubenswrapper[4730]: I0930 10:01:43.608978 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f17dee56-592e-45e7-8c4f-80854757d254-client-ca\") pod \"f17dee56-592e-45e7-8c4f-80854757d254\" (UID: \"f17dee56-592e-45e7-8c4f-80854757d254\") " Sep 30 10:01:43 crc kubenswrapper[4730]: I0930 10:01:43.609021 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f17dee56-592e-45e7-8c4f-80854757d254-config\") pod \"f17dee56-592e-45e7-8c4f-80854757d254\" (UID: \"f17dee56-592e-45e7-8c4f-80854757d254\") " Sep 30 10:01:43 crc kubenswrapper[4730]: I0930 10:01:43.609337 4730 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2cf85f90-a707-4cbf-9cea-472b1109692d-config\") on node \"crc\" DevicePath \"\"" Sep 30 10:01:43 crc kubenswrapper[4730]: I0930 10:01:43.609364 4730 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/2cf85f90-a707-4cbf-9cea-472b1109692d-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Sep 30 10:01:43 crc kubenswrapper[4730]: I0930 10:01:43.609383 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h5vf7\" (UniqueName: \"kubernetes.io/projected/2cf85f90-a707-4cbf-9cea-472b1109692d-kube-api-access-h5vf7\") on node \"crc\" DevicePath \"\"" Sep 30 10:01:43 crc kubenswrapper[4730]: I0930 10:01:43.609398 4730 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/2cf85f90-a707-4cbf-9cea-472b1109692d-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 10:01:43 crc kubenswrapper[4730]: I0930 10:01:43.609408 4730 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/2cf85f90-a707-4cbf-9cea-472b1109692d-client-ca\") on node \"crc\" DevicePath \"\"" Sep 30 10:01:43 crc kubenswrapper[4730]: I0930 10:01:43.609697 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f17dee56-592e-45e7-8c4f-80854757d254-client-ca" (OuterVolumeSpecName: "client-ca") pod "f17dee56-592e-45e7-8c4f-80854757d254" (UID: "f17dee56-592e-45e7-8c4f-80854757d254"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 10:01:43 crc kubenswrapper[4730]: I0930 10:01:43.609748 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f17dee56-592e-45e7-8c4f-80854757d254-config" (OuterVolumeSpecName: "config") pod "f17dee56-592e-45e7-8c4f-80854757d254" (UID: "f17dee56-592e-45e7-8c4f-80854757d254"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 10:01:43 crc kubenswrapper[4730]: I0930 10:01:43.612367 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f17dee56-592e-45e7-8c4f-80854757d254-kube-api-access-rbb94" (OuterVolumeSpecName: "kube-api-access-rbb94") pod "f17dee56-592e-45e7-8c4f-80854757d254" (UID: "f17dee56-592e-45e7-8c4f-80854757d254"). InnerVolumeSpecName "kube-api-access-rbb94". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:01:43 crc kubenswrapper[4730]: I0930 10:01:43.612809 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f17dee56-592e-45e7-8c4f-80854757d254-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "f17dee56-592e-45e7-8c4f-80854757d254" (UID: "f17dee56-592e-45e7-8c4f-80854757d254"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:01:43 crc kubenswrapper[4730]: I0930 10:01:43.710412 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rbb94\" (UniqueName: \"kubernetes.io/projected/f17dee56-592e-45e7-8c4f-80854757d254-kube-api-access-rbb94\") on node \"crc\" DevicePath \"\"" Sep 30 10:01:43 crc kubenswrapper[4730]: I0930 10:01:43.710459 4730 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f17dee56-592e-45e7-8c4f-80854757d254-client-ca\") on node \"crc\" DevicePath \"\"" Sep 30 10:01:43 crc kubenswrapper[4730]: I0930 10:01:43.710471 4730 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f17dee56-592e-45e7-8c4f-80854757d254-config\") on node \"crc\" DevicePath \"\"" Sep 30 10:01:43 crc kubenswrapper[4730]: I0930 10:01:43.710481 4730 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f17dee56-592e-45e7-8c4f-80854757d254-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 10:01:44 crc kubenswrapper[4730]: I0930 10:01:44.026727 4730 generic.go:334] "Generic (PLEG): container finished" podID="f17dee56-592e-45e7-8c4f-80854757d254" containerID="62f11e9dd36c905303831657d65e23490b04c6ff4753450b5fac2d04c02f21e2" exitCode=0 Sep 30 10:01:44 crc kubenswrapper[4730]: I0930 10:01:44.026808 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-cljtg" Sep 30 10:01:44 crc kubenswrapper[4730]: I0930 10:01:44.026802 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-cljtg" event={"ID":"f17dee56-592e-45e7-8c4f-80854757d254","Type":"ContainerDied","Data":"62f11e9dd36c905303831657d65e23490b04c6ff4753450b5fac2d04c02f21e2"} Sep 30 10:01:44 crc kubenswrapper[4730]: I0930 10:01:44.027239 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-cljtg" event={"ID":"f17dee56-592e-45e7-8c4f-80854757d254","Type":"ContainerDied","Data":"e8e499b6f69810484c9872afcb0112e8fb5b2f5bf10c0a579fb9ed0c69e8a39e"} Sep 30 10:01:44 crc kubenswrapper[4730]: I0930 10:01:44.027272 4730 scope.go:117] "RemoveContainer" containerID="62f11e9dd36c905303831657d65e23490b04c6ff4753450b5fac2d04c02f21e2" Sep 30 10:01:44 crc kubenswrapper[4730]: I0930 10:01:44.029021 4730 generic.go:334] "Generic (PLEG): container finished" podID="2cf85f90-a707-4cbf-9cea-472b1109692d" containerID="87d1080c0946298811ddf6ccfb942c971dbcbe7efa93ff6436bac7476cd24528" exitCode=0 Sep 30 10:01:44 crc kubenswrapper[4730]: I0930 10:01:44.029071 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-df648" Sep 30 10:01:44 crc kubenswrapper[4730]: I0930 10:01:44.029075 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-df648" event={"ID":"2cf85f90-a707-4cbf-9cea-472b1109692d","Type":"ContainerDied","Data":"87d1080c0946298811ddf6ccfb942c971dbcbe7efa93ff6436bac7476cd24528"} Sep 30 10:01:44 crc kubenswrapper[4730]: I0930 10:01:44.029116 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-df648" event={"ID":"2cf85f90-a707-4cbf-9cea-472b1109692d","Type":"ContainerDied","Data":"50211809c53b56e94bea9f53cc50e89d26440b98fe4118fccc26fd81e08a0b50"} Sep 30 10:01:44 crc kubenswrapper[4730]: I0930 10:01:44.047188 4730 scope.go:117] "RemoveContainer" containerID="62f11e9dd36c905303831657d65e23490b04c6ff4753450b5fac2d04c02f21e2" Sep 30 10:01:44 crc kubenswrapper[4730]: E0930 10:01:44.047621 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"62f11e9dd36c905303831657d65e23490b04c6ff4753450b5fac2d04c02f21e2\": container with ID starting with 62f11e9dd36c905303831657d65e23490b04c6ff4753450b5fac2d04c02f21e2 not found: ID does not exist" containerID="62f11e9dd36c905303831657d65e23490b04c6ff4753450b5fac2d04c02f21e2" Sep 30 10:01:44 crc kubenswrapper[4730]: I0930 10:01:44.047665 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"62f11e9dd36c905303831657d65e23490b04c6ff4753450b5fac2d04c02f21e2"} err="failed to get container status \"62f11e9dd36c905303831657d65e23490b04c6ff4753450b5fac2d04c02f21e2\": rpc error: code = NotFound desc = could not find container \"62f11e9dd36c905303831657d65e23490b04c6ff4753450b5fac2d04c02f21e2\": container with ID starting with 62f11e9dd36c905303831657d65e23490b04c6ff4753450b5fac2d04c02f21e2 not found: ID does not exist" Sep 30 10:01:44 crc kubenswrapper[4730]: I0930 10:01:44.047700 4730 scope.go:117] "RemoveContainer" containerID="87d1080c0946298811ddf6ccfb942c971dbcbe7efa93ff6436bac7476cd24528" Sep 30 
10:01:44 crc kubenswrapper[4730]: I0930 10:01:44.068367 4730 scope.go:117] "RemoveContainer" containerID="87d1080c0946298811ddf6ccfb942c971dbcbe7efa93ff6436bac7476cd24528" Sep 30 10:01:44 crc kubenswrapper[4730]: E0930 10:01:44.068761 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"87d1080c0946298811ddf6ccfb942c971dbcbe7efa93ff6436bac7476cd24528\": container with ID starting with 87d1080c0946298811ddf6ccfb942c971dbcbe7efa93ff6436bac7476cd24528 not found: ID does not exist" containerID="87d1080c0946298811ddf6ccfb942c971dbcbe7efa93ff6436bac7476cd24528" Sep 30 10:01:44 crc kubenswrapper[4730]: I0930 10:01:44.068790 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"87d1080c0946298811ddf6ccfb942c971dbcbe7efa93ff6436bac7476cd24528"} err="failed to get container status \"87d1080c0946298811ddf6ccfb942c971dbcbe7efa93ff6436bac7476cd24528\": rpc error: code = NotFound desc = could not find container \"87d1080c0946298811ddf6ccfb942c971dbcbe7efa93ff6436bac7476cd24528\": container with ID starting with 87d1080c0946298811ddf6ccfb942c971dbcbe7efa93ff6436bac7476cd24528 not found: ID does not exist" Sep 30 10:01:44 crc kubenswrapper[4730]: I0930 10:01:44.068817 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-df648"] Sep 30 10:01:44 crc kubenswrapper[4730]: I0930 10:01:44.073990 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-df648"] Sep 30 10:01:44 crc kubenswrapper[4730]: I0930 10:01:44.079227 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-cljtg"] Sep 30 10:01:44 crc kubenswrapper[4730]: I0930 10:01:44.082914 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-cljtg"] Sep 30 10:01:44 crc kubenswrapper[4730]: I0930 10:01:44.387954 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2cf85f90-a707-4cbf-9cea-472b1109692d" path="/var/lib/kubelet/pods/2cf85f90-a707-4cbf-9cea-472b1109692d/volumes" Sep 30 10:01:44 crc kubenswrapper[4730]: I0930 10:01:44.388458 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f17dee56-592e-45e7-8c4f-80854757d254" path="/var/lib/kubelet/pods/f17dee56-592e-45e7-8c4f-80854757d254/volumes" Sep 30 10:01:44 crc kubenswrapper[4730]: I0930 10:01:44.493215 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-85c564c446-vp5f7"] Sep 30 10:01:44 crc kubenswrapper[4730]: E0930 10:01:44.493518 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2cf85f90-a707-4cbf-9cea-472b1109692d" containerName="controller-manager" Sep 30 10:01:44 crc kubenswrapper[4730]: I0930 10:01:44.493539 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="2cf85f90-a707-4cbf-9cea-472b1109692d" containerName="controller-manager" Sep 30 10:01:44 crc kubenswrapper[4730]: E0930 10:01:44.493570 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f17dee56-592e-45e7-8c4f-80854757d254" containerName="route-controller-manager" Sep 30 10:01:44 crc kubenswrapper[4730]: I0930 10:01:44.493577 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="f17dee56-592e-45e7-8c4f-80854757d254" containerName="route-controller-manager" Sep 30 10:01:44 crc 
kubenswrapper[4730]: I0930 10:01:44.493716 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="f17dee56-592e-45e7-8c4f-80854757d254" containerName="route-controller-manager" Sep 30 10:01:44 crc kubenswrapper[4730]: I0930 10:01:44.493737 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="2cf85f90-a707-4cbf-9cea-472b1109692d" containerName="controller-manager" Sep 30 10:01:44 crc kubenswrapper[4730]: I0930 10:01:44.494193 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-85c564c446-vp5f7" Sep 30 10:01:44 crc kubenswrapper[4730]: I0930 10:01:44.496969 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-74d4cddf77-2hk9l"] Sep 30 10:01:44 crc kubenswrapper[4730]: I0930 10:01:44.497056 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Sep 30 10:01:44 crc kubenswrapper[4730]: I0930 10:01:44.497526 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Sep 30 10:01:44 crc kubenswrapper[4730]: I0930 10:01:44.497688 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Sep 30 10:01:44 crc kubenswrapper[4730]: I0930 10:01:44.497814 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Sep 30 10:01:44 crc kubenswrapper[4730]: I0930 10:01:44.497862 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-74d4cddf77-2hk9l" Sep 30 10:01:44 crc kubenswrapper[4730]: I0930 10:01:44.497936 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Sep 30 10:01:44 crc kubenswrapper[4730]: I0930 10:01:44.498046 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Sep 30 10:01:44 crc kubenswrapper[4730]: I0930 10:01:44.501103 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Sep 30 10:01:44 crc kubenswrapper[4730]: I0930 10:01:44.501312 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Sep 30 10:01:44 crc kubenswrapper[4730]: I0930 10:01:44.501620 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Sep 30 10:01:44 crc kubenswrapper[4730]: I0930 10:01:44.501746 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Sep 30 10:01:44 crc kubenswrapper[4730]: I0930 10:01:44.507389 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-74d4cddf77-2hk9l"] Sep 30 10:01:44 crc kubenswrapper[4730]: I0930 10:01:44.510277 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Sep 30 10:01:44 crc kubenswrapper[4730]: I0930 10:01:44.511258 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Sep 30 10:01:44 crc kubenswrapper[4730]: I0930 10:01:44.514003 4730 kubelet.go:2428] "SyncLoop 
UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-85c564c446-vp5f7"] Sep 30 10:01:44 crc kubenswrapper[4730]: I0930 10:01:44.517932 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Sep 30 10:01:44 crc kubenswrapper[4730]: I0930 10:01:44.625589 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fxxdg\" (UniqueName: \"kubernetes.io/projected/da0a716f-2cd1-4a00-8599-0b2d7071e279-kube-api-access-fxxdg\") pod \"controller-manager-74d4cddf77-2hk9l\" (UID: \"da0a716f-2cd1-4a00-8599-0b2d7071e279\") " pod="openshift-controller-manager/controller-manager-74d4cddf77-2hk9l" Sep 30 10:01:44 crc kubenswrapper[4730]: I0930 10:01:44.625658 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/da0a716f-2cd1-4a00-8599-0b2d7071e279-config\") pod \"controller-manager-74d4cddf77-2hk9l\" (UID: \"da0a716f-2cd1-4a00-8599-0b2d7071e279\") " pod="openshift-controller-manager/controller-manager-74d4cddf77-2hk9l" Sep 30 10:01:44 crc kubenswrapper[4730]: I0930 10:01:44.625806 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7bda1681-b831-4d21-ae4e-1020915fd6d4-config\") pod \"route-controller-manager-85c564c446-vp5f7\" (UID: \"7bda1681-b831-4d21-ae4e-1020915fd6d4\") " pod="openshift-route-controller-manager/route-controller-manager-85c564c446-vp5f7" Sep 30 10:01:44 crc kubenswrapper[4730]: I0930 10:01:44.625852 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h89j6\" (UniqueName: \"kubernetes.io/projected/7bda1681-b831-4d21-ae4e-1020915fd6d4-kube-api-access-h89j6\") pod \"route-controller-manager-85c564c446-vp5f7\" (UID: \"7bda1681-b831-4d21-ae4e-1020915fd6d4\") " pod="openshift-route-controller-manager/route-controller-manager-85c564c446-vp5f7" Sep 30 10:01:44 crc kubenswrapper[4730]: I0930 10:01:44.625877 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7bda1681-b831-4d21-ae4e-1020915fd6d4-serving-cert\") pod \"route-controller-manager-85c564c446-vp5f7\" (UID: \"7bda1681-b831-4d21-ae4e-1020915fd6d4\") " pod="openshift-route-controller-manager/route-controller-manager-85c564c446-vp5f7" Sep 30 10:01:44 crc kubenswrapper[4730]: I0930 10:01:44.626025 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7bda1681-b831-4d21-ae4e-1020915fd6d4-client-ca\") pod \"route-controller-manager-85c564c446-vp5f7\" (UID: \"7bda1681-b831-4d21-ae4e-1020915fd6d4\") " pod="openshift-route-controller-manager/route-controller-manager-85c564c446-vp5f7" Sep 30 10:01:44 crc kubenswrapper[4730]: I0930 10:01:44.626068 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/da0a716f-2cd1-4a00-8599-0b2d7071e279-serving-cert\") pod \"controller-manager-74d4cddf77-2hk9l\" (UID: \"da0a716f-2cd1-4a00-8599-0b2d7071e279\") " pod="openshift-controller-manager/controller-manager-74d4cddf77-2hk9l" Sep 30 10:01:44 crc kubenswrapper[4730]: I0930 10:01:44.626203 4730 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/da0a716f-2cd1-4a00-8599-0b2d7071e279-client-ca\") pod \"controller-manager-74d4cddf77-2hk9l\" (UID: \"da0a716f-2cd1-4a00-8599-0b2d7071e279\") " pod="openshift-controller-manager/controller-manager-74d4cddf77-2hk9l" Sep 30 10:01:44 crc kubenswrapper[4730]: I0930 10:01:44.626267 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/da0a716f-2cd1-4a00-8599-0b2d7071e279-proxy-ca-bundles\") pod \"controller-manager-74d4cddf77-2hk9l\" (UID: \"da0a716f-2cd1-4a00-8599-0b2d7071e279\") " pod="openshift-controller-manager/controller-manager-74d4cddf77-2hk9l" Sep 30 10:01:44 crc kubenswrapper[4730]: I0930 10:01:44.727472 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/da0a716f-2cd1-4a00-8599-0b2d7071e279-config\") pod \"controller-manager-74d4cddf77-2hk9l\" (UID: \"da0a716f-2cd1-4a00-8599-0b2d7071e279\") " pod="openshift-controller-manager/controller-manager-74d4cddf77-2hk9l" Sep 30 10:01:44 crc kubenswrapper[4730]: I0930 10:01:44.727533 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7bda1681-b831-4d21-ae4e-1020915fd6d4-config\") pod \"route-controller-manager-85c564c446-vp5f7\" (UID: \"7bda1681-b831-4d21-ae4e-1020915fd6d4\") " pod="openshift-route-controller-manager/route-controller-manager-85c564c446-vp5f7" Sep 30 10:01:44 crc kubenswrapper[4730]: I0930 10:01:44.727567 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h89j6\" (UniqueName: \"kubernetes.io/projected/7bda1681-b831-4d21-ae4e-1020915fd6d4-kube-api-access-h89j6\") pod \"route-controller-manager-85c564c446-vp5f7\" (UID: \"7bda1681-b831-4d21-ae4e-1020915fd6d4\") " pod="openshift-route-controller-manager/route-controller-manager-85c564c446-vp5f7" Sep 30 10:01:44 crc kubenswrapper[4730]: I0930 10:01:44.727588 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7bda1681-b831-4d21-ae4e-1020915fd6d4-serving-cert\") pod \"route-controller-manager-85c564c446-vp5f7\" (UID: \"7bda1681-b831-4d21-ae4e-1020915fd6d4\") " pod="openshift-route-controller-manager/route-controller-manager-85c564c446-vp5f7" Sep 30 10:01:44 crc kubenswrapper[4730]: I0930 10:01:44.727656 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7bda1681-b831-4d21-ae4e-1020915fd6d4-client-ca\") pod \"route-controller-manager-85c564c446-vp5f7\" (UID: \"7bda1681-b831-4d21-ae4e-1020915fd6d4\") " pod="openshift-route-controller-manager/route-controller-manager-85c564c446-vp5f7" Sep 30 10:01:44 crc kubenswrapper[4730]: I0930 10:01:44.727684 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/da0a716f-2cd1-4a00-8599-0b2d7071e279-serving-cert\") pod \"controller-manager-74d4cddf77-2hk9l\" (UID: \"da0a716f-2cd1-4a00-8599-0b2d7071e279\") " pod="openshift-controller-manager/controller-manager-74d4cddf77-2hk9l" Sep 30 10:01:44 crc kubenswrapper[4730]: I0930 10:01:44.727707 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: 
\"kubernetes.io/configmap/da0a716f-2cd1-4a00-8599-0b2d7071e279-client-ca\") pod \"controller-manager-74d4cddf77-2hk9l\" (UID: \"da0a716f-2cd1-4a00-8599-0b2d7071e279\") " pod="openshift-controller-manager/controller-manager-74d4cddf77-2hk9l" Sep 30 10:01:44 crc kubenswrapper[4730]: I0930 10:01:44.727724 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/da0a716f-2cd1-4a00-8599-0b2d7071e279-proxy-ca-bundles\") pod \"controller-manager-74d4cddf77-2hk9l\" (UID: \"da0a716f-2cd1-4a00-8599-0b2d7071e279\") " pod="openshift-controller-manager/controller-manager-74d4cddf77-2hk9l" Sep 30 10:01:44 crc kubenswrapper[4730]: I0930 10:01:44.727753 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fxxdg\" (UniqueName: \"kubernetes.io/projected/da0a716f-2cd1-4a00-8599-0b2d7071e279-kube-api-access-fxxdg\") pod \"controller-manager-74d4cddf77-2hk9l\" (UID: \"da0a716f-2cd1-4a00-8599-0b2d7071e279\") " pod="openshift-controller-manager/controller-manager-74d4cddf77-2hk9l" Sep 30 10:01:44 crc kubenswrapper[4730]: I0930 10:01:44.729109 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7bda1681-b831-4d21-ae4e-1020915fd6d4-client-ca\") pod \"route-controller-manager-85c564c446-vp5f7\" (UID: \"7bda1681-b831-4d21-ae4e-1020915fd6d4\") " pod="openshift-route-controller-manager/route-controller-manager-85c564c446-vp5f7" Sep 30 10:01:44 crc kubenswrapper[4730]: I0930 10:01:44.730589 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7bda1681-b831-4d21-ae4e-1020915fd6d4-config\") pod \"route-controller-manager-85c564c446-vp5f7\" (UID: \"7bda1681-b831-4d21-ae4e-1020915fd6d4\") " pod="openshift-route-controller-manager/route-controller-manager-85c564c446-vp5f7" Sep 30 10:01:44 crc kubenswrapper[4730]: I0930 10:01:44.730723 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/da0a716f-2cd1-4a00-8599-0b2d7071e279-client-ca\") pod \"controller-manager-74d4cddf77-2hk9l\" (UID: \"da0a716f-2cd1-4a00-8599-0b2d7071e279\") " pod="openshift-controller-manager/controller-manager-74d4cddf77-2hk9l" Sep 30 10:01:44 crc kubenswrapper[4730]: I0930 10:01:44.731083 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/da0a716f-2cd1-4a00-8599-0b2d7071e279-config\") pod \"controller-manager-74d4cddf77-2hk9l\" (UID: \"da0a716f-2cd1-4a00-8599-0b2d7071e279\") " pod="openshift-controller-manager/controller-manager-74d4cddf77-2hk9l" Sep 30 10:01:44 crc kubenswrapper[4730]: I0930 10:01:44.732540 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7bda1681-b831-4d21-ae4e-1020915fd6d4-serving-cert\") pod \"route-controller-manager-85c564c446-vp5f7\" (UID: \"7bda1681-b831-4d21-ae4e-1020915fd6d4\") " pod="openshift-route-controller-manager/route-controller-manager-85c564c446-vp5f7" Sep 30 10:01:44 crc kubenswrapper[4730]: I0930 10:01:44.732941 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/da0a716f-2cd1-4a00-8599-0b2d7071e279-proxy-ca-bundles\") pod \"controller-manager-74d4cddf77-2hk9l\" (UID: \"da0a716f-2cd1-4a00-8599-0b2d7071e279\") " 
pod="openshift-controller-manager/controller-manager-74d4cddf77-2hk9l" Sep 30 10:01:44 crc kubenswrapper[4730]: I0930 10:01:44.734186 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/da0a716f-2cd1-4a00-8599-0b2d7071e279-serving-cert\") pod \"controller-manager-74d4cddf77-2hk9l\" (UID: \"da0a716f-2cd1-4a00-8599-0b2d7071e279\") " pod="openshift-controller-manager/controller-manager-74d4cddf77-2hk9l" Sep 30 10:01:44 crc kubenswrapper[4730]: I0930 10:01:44.758097 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fxxdg\" (UniqueName: \"kubernetes.io/projected/da0a716f-2cd1-4a00-8599-0b2d7071e279-kube-api-access-fxxdg\") pod \"controller-manager-74d4cddf77-2hk9l\" (UID: \"da0a716f-2cd1-4a00-8599-0b2d7071e279\") " pod="openshift-controller-manager/controller-manager-74d4cddf77-2hk9l" Sep 30 10:01:44 crc kubenswrapper[4730]: I0930 10:01:44.761586 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h89j6\" (UniqueName: \"kubernetes.io/projected/7bda1681-b831-4d21-ae4e-1020915fd6d4-kube-api-access-h89j6\") pod \"route-controller-manager-85c564c446-vp5f7\" (UID: \"7bda1681-b831-4d21-ae4e-1020915fd6d4\") " pod="openshift-route-controller-manager/route-controller-manager-85c564c446-vp5f7" Sep 30 10:01:44 crc kubenswrapper[4730]: I0930 10:01:44.810909 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-85c564c446-vp5f7" Sep 30 10:01:44 crc kubenswrapper[4730]: I0930 10:01:44.822057 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-74d4cddf77-2hk9l" Sep 30 10:01:45 crc kubenswrapper[4730]: I0930 10:01:45.213710 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-85c564c446-vp5f7"] Sep 30 10:01:45 crc kubenswrapper[4730]: W0930 10:01:45.226516 4730 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7bda1681_b831_4d21_ae4e_1020915fd6d4.slice/crio-2ac3c7e96c6ab793f2360cc0cf5aa34f9c13272ad200af9c2196fc962404a10a WatchSource:0}: Error finding container 2ac3c7e96c6ab793f2360cc0cf5aa34f9c13272ad200af9c2196fc962404a10a: Status 404 returned error can't find the container with id 2ac3c7e96c6ab793f2360cc0cf5aa34f9c13272ad200af9c2196fc962404a10a Sep 30 10:01:45 crc kubenswrapper[4730]: I0930 10:01:45.287509 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-74d4cddf77-2hk9l"] Sep 30 10:01:45 crc kubenswrapper[4730]: W0930 10:01:45.305964 4730 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podda0a716f_2cd1_4a00_8599_0b2d7071e279.slice/crio-a5103fb7ca3fa3c2dbe4f83ea71ecffa33e4048540cdc50b3248468c971e8820 WatchSource:0}: Error finding container a5103fb7ca3fa3c2dbe4f83ea71ecffa33e4048540cdc50b3248468c971e8820: Status 404 returned error can't find the container with id a5103fb7ca3fa3c2dbe4f83ea71ecffa33e4048540cdc50b3248468c971e8820 Sep 30 10:01:46 crc kubenswrapper[4730]: I0930 10:01:46.049941 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-85c564c446-vp5f7" 
event={"ID":"7bda1681-b831-4d21-ae4e-1020915fd6d4","Type":"ContainerStarted","Data":"da7da507a0d78e42be6b42d726fef4ff84a38cec9d650276bde2492b22dd65a8"} Sep 30 10:01:46 crc kubenswrapper[4730]: I0930 10:01:46.050243 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-85c564c446-vp5f7" event={"ID":"7bda1681-b831-4d21-ae4e-1020915fd6d4","Type":"ContainerStarted","Data":"2ac3c7e96c6ab793f2360cc0cf5aa34f9c13272ad200af9c2196fc962404a10a"} Sep 30 10:01:46 crc kubenswrapper[4730]: I0930 10:01:46.051384 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-85c564c446-vp5f7" Sep 30 10:01:46 crc kubenswrapper[4730]: I0930 10:01:46.053419 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-74d4cddf77-2hk9l" event={"ID":"da0a716f-2cd1-4a00-8599-0b2d7071e279","Type":"ContainerStarted","Data":"b1adcab70836b3ac69591c55c81dbfa40e7f36f77e75d2d0f0f752ff52b11fad"} Sep 30 10:01:46 crc kubenswrapper[4730]: I0930 10:01:46.053481 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-74d4cddf77-2hk9l" event={"ID":"da0a716f-2cd1-4a00-8599-0b2d7071e279","Type":"ContainerStarted","Data":"a5103fb7ca3fa3c2dbe4f83ea71ecffa33e4048540cdc50b3248468c971e8820"} Sep 30 10:01:46 crc kubenswrapper[4730]: I0930 10:01:46.053907 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-74d4cddf77-2hk9l" Sep 30 10:01:46 crc kubenswrapper[4730]: I0930 10:01:46.057080 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-85c564c446-vp5f7" Sep 30 10:01:46 crc kubenswrapper[4730]: I0930 10:01:46.058791 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-74d4cddf77-2hk9l" Sep 30 10:01:46 crc kubenswrapper[4730]: I0930 10:01:46.070092 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-85c564c446-vp5f7" podStartSLOduration=3.070072779 podStartE2EDuration="3.070072779s" podCreationTimestamp="2025-09-30 10:01:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 10:01:46.065301483 +0000 UTC m=+750.398561476" watchObservedRunningTime="2025-09-30 10:01:46.070072779 +0000 UTC m=+750.403332772" Sep 30 10:01:46 crc kubenswrapper[4730]: I0930 10:01:46.113593 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-74d4cddf77-2hk9l" podStartSLOduration=3.11357431 podStartE2EDuration="3.11357431s" podCreationTimestamp="2025-09-30 10:01:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 10:01:46.107261163 +0000 UTC m=+750.440521146" watchObservedRunningTime="2025-09-30 10:01:46.11357431 +0000 UTC m=+750.446834303" Sep 30 10:01:48 crc kubenswrapper[4730]: I0930 10:01:48.697777 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-webhook-6d689559c5-6znsv" Sep 30 10:01:52 crc kubenswrapper[4730]: I0930 10:01:52.125728 4730 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and 
Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Sep 30 10:02:01 crc kubenswrapper[4730]: I0930 10:02:01.036485 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-mdfs6"] Sep 30 10:02:01 crc kubenswrapper[4730]: I0930 10:02:01.038531 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mdfs6" Sep 30 10:02:01 crc kubenswrapper[4730]: I0930 10:02:01.040582 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-mdfs6"] Sep 30 10:02:01 crc kubenswrapper[4730]: I0930 10:02:01.172362 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1ad8281f-ae5e-4f60-8915-da0ca06e66c1-catalog-content\") pod \"redhat-marketplace-mdfs6\" (UID: \"1ad8281f-ae5e-4f60-8915-da0ca06e66c1\") " pod="openshift-marketplace/redhat-marketplace-mdfs6" Sep 30 10:02:01 crc kubenswrapper[4730]: I0930 10:02:01.172422 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-56d7b\" (UniqueName: \"kubernetes.io/projected/1ad8281f-ae5e-4f60-8915-da0ca06e66c1-kube-api-access-56d7b\") pod \"redhat-marketplace-mdfs6\" (UID: \"1ad8281f-ae5e-4f60-8915-da0ca06e66c1\") " pod="openshift-marketplace/redhat-marketplace-mdfs6" Sep 30 10:02:01 crc kubenswrapper[4730]: I0930 10:02:01.172584 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1ad8281f-ae5e-4f60-8915-da0ca06e66c1-utilities\") pod \"redhat-marketplace-mdfs6\" (UID: \"1ad8281f-ae5e-4f60-8915-da0ca06e66c1\") " pod="openshift-marketplace/redhat-marketplace-mdfs6" Sep 30 10:02:01 crc kubenswrapper[4730]: I0930 10:02:01.273937 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1ad8281f-ae5e-4f60-8915-da0ca06e66c1-utilities\") pod \"redhat-marketplace-mdfs6\" (UID: \"1ad8281f-ae5e-4f60-8915-da0ca06e66c1\") " pod="openshift-marketplace/redhat-marketplace-mdfs6" Sep 30 10:02:01 crc kubenswrapper[4730]: I0930 10:02:01.274009 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1ad8281f-ae5e-4f60-8915-da0ca06e66c1-catalog-content\") pod \"redhat-marketplace-mdfs6\" (UID: \"1ad8281f-ae5e-4f60-8915-da0ca06e66c1\") " pod="openshift-marketplace/redhat-marketplace-mdfs6" Sep 30 10:02:01 crc kubenswrapper[4730]: I0930 10:02:01.274053 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-56d7b\" (UniqueName: \"kubernetes.io/projected/1ad8281f-ae5e-4f60-8915-da0ca06e66c1-kube-api-access-56d7b\") pod \"redhat-marketplace-mdfs6\" (UID: \"1ad8281f-ae5e-4f60-8915-da0ca06e66c1\") " pod="openshift-marketplace/redhat-marketplace-mdfs6" Sep 30 10:02:01 crc kubenswrapper[4730]: I0930 10:02:01.274559 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1ad8281f-ae5e-4f60-8915-da0ca06e66c1-catalog-content\") pod \"redhat-marketplace-mdfs6\" (UID: \"1ad8281f-ae5e-4f60-8915-da0ca06e66c1\") " pod="openshift-marketplace/redhat-marketplace-mdfs6" Sep 30 10:02:01 crc kubenswrapper[4730]: I0930 10:02:01.274912 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1ad8281f-ae5e-4f60-8915-da0ca06e66c1-utilities\") pod \"redhat-marketplace-mdfs6\" (UID: \"1ad8281f-ae5e-4f60-8915-da0ca06e66c1\") " pod="openshift-marketplace/redhat-marketplace-mdfs6" Sep 30 10:02:01 crc kubenswrapper[4730]: I0930 10:02:01.312952 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-56d7b\" (UniqueName: \"kubernetes.io/projected/1ad8281f-ae5e-4f60-8915-da0ca06e66c1-kube-api-access-56d7b\") pod \"redhat-marketplace-mdfs6\" (UID: \"1ad8281f-ae5e-4f60-8915-da0ca06e66c1\") " pod="openshift-marketplace/redhat-marketplace-mdfs6" Sep 30 10:02:01 crc kubenswrapper[4730]: I0930 10:02:01.356941 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mdfs6" Sep 30 10:02:01 crc kubenswrapper[4730]: I0930 10:02:01.783451 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-mdfs6"] Sep 30 10:02:02 crc kubenswrapper[4730]: I0930 10:02:02.172161 4730 generic.go:334] "Generic (PLEG): container finished" podID="1ad8281f-ae5e-4f60-8915-da0ca06e66c1" containerID="ac9ce89ab20c918d42547e41eb262a002df1793d6b4767e5b729fbf7440cdb5b" exitCode=0 Sep 30 10:02:02 crc kubenswrapper[4730]: I0930 10:02:02.172265 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mdfs6" event={"ID":"1ad8281f-ae5e-4f60-8915-da0ca06e66c1","Type":"ContainerDied","Data":"ac9ce89ab20c918d42547e41eb262a002df1793d6b4767e5b729fbf7440cdb5b"} Sep 30 10:02:02 crc kubenswrapper[4730]: I0930 10:02:02.172523 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mdfs6" event={"ID":"1ad8281f-ae5e-4f60-8915-da0ca06e66c1","Type":"ContainerStarted","Data":"e19764377a0297533749d5139b0f029eb60c46afb94a46febc625a4fbd884741"} Sep 30 10:02:02 crc kubenswrapper[4730]: I0930 10:02:02.336875 4730 patch_prober.go:28] interesting pod/machine-config-daemon-d4zf9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 10:02:02 crc kubenswrapper[4730]: I0930 10:02:02.336963 4730 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 10:02:02 crc kubenswrapper[4730]: I0930 10:02:02.337022 4730 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" Sep 30 10:02:02 crc kubenswrapper[4730]: I0930 10:02:02.337724 4730 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"e4ef153cbbd5d6d6260e417ec2d4e0d4bbc0012c9d4b4d0945d491a415dda27d"} pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 10:02:02 crc kubenswrapper[4730]: I0930 10:02:02.337794 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" 
containerName="machine-config-daemon" containerID="cri-o://e4ef153cbbd5d6d6260e417ec2d4e0d4bbc0012c9d4b4d0945d491a415dda27d" gracePeriod=600 Sep 30 10:02:02 crc kubenswrapper[4730]: I0930 10:02:02.886856 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96r4bgv"] Sep 30 10:02:02 crc kubenswrapper[4730]: I0930 10:02:02.888296 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96r4bgv" Sep 30 10:02:02 crc kubenswrapper[4730]: I0930 10:02:02.890112 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Sep 30 10:02:02 crc kubenswrapper[4730]: I0930 10:02:02.896422 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96r4bgv"] Sep 30 10:02:02 crc kubenswrapper[4730]: I0930 10:02:02.995747 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6xdzn\" (UniqueName: \"kubernetes.io/projected/99980a45-f5ca-428d-b285-bc4f72ff8e28-kube-api-access-6xdzn\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96r4bgv\" (UID: \"99980a45-f5ca-428d-b285-bc4f72ff8e28\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96r4bgv" Sep 30 10:02:02 crc kubenswrapper[4730]: I0930 10:02:02.995812 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/99980a45-f5ca-428d-b285-bc4f72ff8e28-bundle\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96r4bgv\" (UID: \"99980a45-f5ca-428d-b285-bc4f72ff8e28\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96r4bgv" Sep 30 10:02:02 crc kubenswrapper[4730]: I0930 10:02:02.995878 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/99980a45-f5ca-428d-b285-bc4f72ff8e28-util\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96r4bgv\" (UID: \"99980a45-f5ca-428d-b285-bc4f72ff8e28\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96r4bgv" Sep 30 10:02:03 crc kubenswrapper[4730]: I0930 10:02:03.097126 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/99980a45-f5ca-428d-b285-bc4f72ff8e28-util\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96r4bgv\" (UID: \"99980a45-f5ca-428d-b285-bc4f72ff8e28\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96r4bgv" Sep 30 10:02:03 crc kubenswrapper[4730]: I0930 10:02:03.097230 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6xdzn\" (UniqueName: \"kubernetes.io/projected/99980a45-f5ca-428d-b285-bc4f72ff8e28-kube-api-access-6xdzn\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96r4bgv\" (UID: \"99980a45-f5ca-428d-b285-bc4f72ff8e28\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96r4bgv" Sep 30 10:02:03 crc kubenswrapper[4730]: I0930 10:02:03.097266 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: 
\"kubernetes.io/empty-dir/99980a45-f5ca-428d-b285-bc4f72ff8e28-bundle\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96r4bgv\" (UID: \"99980a45-f5ca-428d-b285-bc4f72ff8e28\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96r4bgv" Sep 30 10:02:03 crc kubenswrapper[4730]: I0930 10:02:03.097936 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/99980a45-f5ca-428d-b285-bc4f72ff8e28-util\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96r4bgv\" (UID: \"99980a45-f5ca-428d-b285-bc4f72ff8e28\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96r4bgv" Sep 30 10:02:03 crc kubenswrapper[4730]: I0930 10:02:03.097963 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/99980a45-f5ca-428d-b285-bc4f72ff8e28-bundle\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96r4bgv\" (UID: \"99980a45-f5ca-428d-b285-bc4f72ff8e28\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96r4bgv" Sep 30 10:02:03 crc kubenswrapper[4730]: I0930 10:02:03.117929 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6xdzn\" (UniqueName: \"kubernetes.io/projected/99980a45-f5ca-428d-b285-bc4f72ff8e28-kube-api-access-6xdzn\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96r4bgv\" (UID: \"99980a45-f5ca-428d-b285-bc4f72ff8e28\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96r4bgv" Sep 30 10:02:03 crc kubenswrapper[4730]: I0930 10:02:03.180733 4730 generic.go:334] "Generic (PLEG): container finished" podID="1ad8281f-ae5e-4f60-8915-da0ca06e66c1" containerID="2aa8446cffebf64a31583109fa70d4ede7038309858a643f10fc04ee625cc2f4" exitCode=0 Sep 30 10:02:03 crc kubenswrapper[4730]: I0930 10:02:03.180832 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mdfs6" event={"ID":"1ad8281f-ae5e-4f60-8915-da0ca06e66c1","Type":"ContainerDied","Data":"2aa8446cffebf64a31583109fa70d4ede7038309858a643f10fc04ee625cc2f4"} Sep 30 10:02:03 crc kubenswrapper[4730]: I0930 10:02:03.189151 4730 generic.go:334] "Generic (PLEG): container finished" podID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerID="e4ef153cbbd5d6d6260e417ec2d4e0d4bbc0012c9d4b4d0945d491a415dda27d" exitCode=0 Sep 30 10:02:03 crc kubenswrapper[4730]: I0930 10:02:03.189184 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" event={"ID":"95bd4436-8399-478d-9552-c9ba5ae8f327","Type":"ContainerDied","Data":"e4ef153cbbd5d6d6260e417ec2d4e0d4bbc0012c9d4b4d0945d491a415dda27d"} Sep 30 10:02:03 crc kubenswrapper[4730]: I0930 10:02:03.189242 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" event={"ID":"95bd4436-8399-478d-9552-c9ba5ae8f327","Type":"ContainerStarted","Data":"900986cbdecf38d2005d5e11f37ce0d1a6c8ab5af66f64b87d1373420d3568ee"} Sep 30 10:02:03 crc kubenswrapper[4730]: I0930 10:02:03.189263 4730 scope.go:117] "RemoveContainer" containerID="dafc5f81114ac6c819cc935ab6256c7275b10fad50ffbd38ad7d62bed43ffa86" Sep 30 10:02:03 crc kubenswrapper[4730]: I0930 10:02:03.209803 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96r4bgv" Sep 30 10:02:03 crc kubenswrapper[4730]: I0930 10:02:03.701474 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96r4bgv"] Sep 30 10:02:03 crc kubenswrapper[4730]: W0930 10:02:03.704718 4730 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod99980a45_f5ca_428d_b285_bc4f72ff8e28.slice/crio-3673f8dd6d8d4fba19fdbcc1126c552d8adc980e67edb12277ccc968be85a18b WatchSource:0}: Error finding container 3673f8dd6d8d4fba19fdbcc1126c552d8adc980e67edb12277ccc968be85a18b: Status 404 returned error can't find the container with id 3673f8dd6d8d4fba19fdbcc1126c552d8adc980e67edb12277ccc968be85a18b Sep 30 10:02:04 crc kubenswrapper[4730]: I0930 10:02:04.205443 4730 generic.go:334] "Generic (PLEG): container finished" podID="99980a45-f5ca-428d-b285-bc4f72ff8e28" containerID="d1d739d44a61f852a30a5ad95996236c07d4f09b2b9b4338bfd4a82d69ce46ea" exitCode=0 Sep 30 10:02:04 crc kubenswrapper[4730]: I0930 10:02:04.205905 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96r4bgv" event={"ID":"99980a45-f5ca-428d-b285-bc4f72ff8e28","Type":"ContainerDied","Data":"d1d739d44a61f852a30a5ad95996236c07d4f09b2b9b4338bfd4a82d69ce46ea"} Sep 30 10:02:04 crc kubenswrapper[4730]: I0930 10:02:04.205954 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96r4bgv" event={"ID":"99980a45-f5ca-428d-b285-bc4f72ff8e28","Type":"ContainerStarted","Data":"3673f8dd6d8d4fba19fdbcc1126c552d8adc980e67edb12277ccc968be85a18b"} Sep 30 10:02:04 crc kubenswrapper[4730]: I0930 10:02:04.222081 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mdfs6" event={"ID":"1ad8281f-ae5e-4f60-8915-da0ca06e66c1","Type":"ContainerStarted","Data":"2dc6297caf81c1009db1c48c8006496c9cdae1deb9e637dd78c4ea326eb83d32"} Sep 30 10:02:04 crc kubenswrapper[4730]: I0930 10:02:04.243795 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-mdfs6" podStartSLOduration=1.825637474 podStartE2EDuration="3.243770726s" podCreationTimestamp="2025-09-30 10:02:01 +0000 UTC" firstStartedPulling="2025-09-30 10:02:02.173583176 +0000 UTC m=+766.506843169" lastFinishedPulling="2025-09-30 10:02:03.591716428 +0000 UTC m=+767.924976421" observedRunningTime="2025-09-30 10:02:04.238870097 +0000 UTC m=+768.572130080" watchObservedRunningTime="2025-09-30 10:02:04.243770726 +0000 UTC m=+768.577030719" Sep 30 10:02:05 crc kubenswrapper[4730]: I0930 10:02:05.098531 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-f9d7485db-md87h" podUID="3c83ea01-beb2-4b6c-b67b-93cea3b56ca7" containerName="console" containerID="cri-o://82fd6408e8d281795129803e3ea26c13aef10ee065b8cdef3cba62d99ca559f7" gracePeriod=15 Sep 30 10:02:05 crc kubenswrapper[4730]: I0930 10:02:05.229878 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-md87h_3c83ea01-beb2-4b6c-b67b-93cea3b56ca7/console/0.log" Sep 30 10:02:05 crc kubenswrapper[4730]: I0930 10:02:05.229933 4730 generic.go:334] "Generic (PLEG): container finished" podID="3c83ea01-beb2-4b6c-b67b-93cea3b56ca7" 
containerID="82fd6408e8d281795129803e3ea26c13aef10ee065b8cdef3cba62d99ca559f7" exitCode=2 Sep 30 10:02:05 crc kubenswrapper[4730]: I0930 10:02:05.230039 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-md87h" event={"ID":"3c83ea01-beb2-4b6c-b67b-93cea3b56ca7","Type":"ContainerDied","Data":"82fd6408e8d281795129803e3ea26c13aef10ee065b8cdef3cba62d99ca559f7"} Sep 30 10:02:05 crc kubenswrapper[4730]: I0930 10:02:05.640233 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-md87h_3c83ea01-beb2-4b6c-b67b-93cea3b56ca7/console/0.log" Sep 30 10:02:05 crc kubenswrapper[4730]: I0930 10:02:05.640539 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-md87h" Sep 30 10:02:05 crc kubenswrapper[4730]: I0930 10:02:05.730939 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/3c83ea01-beb2-4b6c-b67b-93cea3b56ca7-oauth-serving-cert\") pod \"3c83ea01-beb2-4b6c-b67b-93cea3b56ca7\" (UID: \"3c83ea01-beb2-4b6c-b67b-93cea3b56ca7\") " Sep 30 10:02:05 crc kubenswrapper[4730]: I0930 10:02:05.731041 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/3c83ea01-beb2-4b6c-b67b-93cea3b56ca7-service-ca\") pod \"3c83ea01-beb2-4b6c-b67b-93cea3b56ca7\" (UID: \"3c83ea01-beb2-4b6c-b67b-93cea3b56ca7\") " Sep 30 10:02:05 crc kubenswrapper[4730]: I0930 10:02:05.731080 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3c83ea01-beb2-4b6c-b67b-93cea3b56ca7-trusted-ca-bundle\") pod \"3c83ea01-beb2-4b6c-b67b-93cea3b56ca7\" (UID: \"3c83ea01-beb2-4b6c-b67b-93cea3b56ca7\") " Sep 30 10:02:05 crc kubenswrapper[4730]: I0930 10:02:05.731126 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/3c83ea01-beb2-4b6c-b67b-93cea3b56ca7-console-oauth-config\") pod \"3c83ea01-beb2-4b6c-b67b-93cea3b56ca7\" (UID: \"3c83ea01-beb2-4b6c-b67b-93cea3b56ca7\") " Sep 30 10:02:05 crc kubenswrapper[4730]: I0930 10:02:05.731148 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r5lnm\" (UniqueName: \"kubernetes.io/projected/3c83ea01-beb2-4b6c-b67b-93cea3b56ca7-kube-api-access-r5lnm\") pod \"3c83ea01-beb2-4b6c-b67b-93cea3b56ca7\" (UID: \"3c83ea01-beb2-4b6c-b67b-93cea3b56ca7\") " Sep 30 10:02:05 crc kubenswrapper[4730]: I0930 10:02:05.731171 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/3c83ea01-beb2-4b6c-b67b-93cea3b56ca7-console-serving-cert\") pod \"3c83ea01-beb2-4b6c-b67b-93cea3b56ca7\" (UID: \"3c83ea01-beb2-4b6c-b67b-93cea3b56ca7\") " Sep 30 10:02:05 crc kubenswrapper[4730]: I0930 10:02:05.731195 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/3c83ea01-beb2-4b6c-b67b-93cea3b56ca7-console-config\") pod \"3c83ea01-beb2-4b6c-b67b-93cea3b56ca7\" (UID: \"3c83ea01-beb2-4b6c-b67b-93cea3b56ca7\") " Sep 30 10:02:05 crc kubenswrapper[4730]: I0930 10:02:05.731708 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/configmap/3c83ea01-beb2-4b6c-b67b-93cea3b56ca7-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "3c83ea01-beb2-4b6c-b67b-93cea3b56ca7" (UID: "3c83ea01-beb2-4b6c-b67b-93cea3b56ca7"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 10:02:05 crc kubenswrapper[4730]: I0930 10:02:05.731736 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3c83ea01-beb2-4b6c-b67b-93cea3b56ca7-service-ca" (OuterVolumeSpecName: "service-ca") pod "3c83ea01-beb2-4b6c-b67b-93cea3b56ca7" (UID: "3c83ea01-beb2-4b6c-b67b-93cea3b56ca7"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 10:02:05 crc kubenswrapper[4730]: I0930 10:02:05.731869 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3c83ea01-beb2-4b6c-b67b-93cea3b56ca7-console-config" (OuterVolumeSpecName: "console-config") pod "3c83ea01-beb2-4b6c-b67b-93cea3b56ca7" (UID: "3c83ea01-beb2-4b6c-b67b-93cea3b56ca7"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 10:02:05 crc kubenswrapper[4730]: I0930 10:02:05.731867 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3c83ea01-beb2-4b6c-b67b-93cea3b56ca7-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "3c83ea01-beb2-4b6c-b67b-93cea3b56ca7" (UID: "3c83ea01-beb2-4b6c-b67b-93cea3b56ca7"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 10:02:05 crc kubenswrapper[4730]: I0930 10:02:05.746172 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3c83ea01-beb2-4b6c-b67b-93cea3b56ca7-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "3c83ea01-beb2-4b6c-b67b-93cea3b56ca7" (UID: "3c83ea01-beb2-4b6c-b67b-93cea3b56ca7"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:02:05 crc kubenswrapper[4730]: I0930 10:02:05.746384 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3c83ea01-beb2-4b6c-b67b-93cea3b56ca7-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "3c83ea01-beb2-4b6c-b67b-93cea3b56ca7" (UID: "3c83ea01-beb2-4b6c-b67b-93cea3b56ca7"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:02:05 crc kubenswrapper[4730]: I0930 10:02:05.746936 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3c83ea01-beb2-4b6c-b67b-93cea3b56ca7-kube-api-access-r5lnm" (OuterVolumeSpecName: "kube-api-access-r5lnm") pod "3c83ea01-beb2-4b6c-b67b-93cea3b56ca7" (UID: "3c83ea01-beb2-4b6c-b67b-93cea3b56ca7"). InnerVolumeSpecName "kube-api-access-r5lnm". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:02:05 crc kubenswrapper[4730]: I0930 10:02:05.832980 4730 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/3c83ea01-beb2-4b6c-b67b-93cea3b56ca7-console-oauth-config\") on node \"crc\" DevicePath \"\"" Sep 30 10:02:05 crc kubenswrapper[4730]: I0930 10:02:05.833025 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r5lnm\" (UniqueName: \"kubernetes.io/projected/3c83ea01-beb2-4b6c-b67b-93cea3b56ca7-kube-api-access-r5lnm\") on node \"crc\" DevicePath \"\"" Sep 30 10:02:05 crc kubenswrapper[4730]: I0930 10:02:05.833036 4730 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/3c83ea01-beb2-4b6c-b67b-93cea3b56ca7-console-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 10:02:05 crc kubenswrapper[4730]: I0930 10:02:05.833045 4730 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/3c83ea01-beb2-4b6c-b67b-93cea3b56ca7-console-config\") on node \"crc\" DevicePath \"\"" Sep 30 10:02:05 crc kubenswrapper[4730]: I0930 10:02:05.833054 4730 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/3c83ea01-beb2-4b6c-b67b-93cea3b56ca7-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 10:02:05 crc kubenswrapper[4730]: I0930 10:02:05.833062 4730 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/3c83ea01-beb2-4b6c-b67b-93cea3b56ca7-service-ca\") on node \"crc\" DevicePath \"\"" Sep 30 10:02:05 crc kubenswrapper[4730]: I0930 10:02:05.833071 4730 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3c83ea01-beb2-4b6c-b67b-93cea3b56ca7-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 10:02:06 crc kubenswrapper[4730]: I0930 10:02:06.237711 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-md87h_3c83ea01-beb2-4b6c-b67b-93cea3b56ca7/console/0.log" Sep 30 10:02:06 crc kubenswrapper[4730]: I0930 10:02:06.238119 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-md87h" event={"ID":"3c83ea01-beb2-4b6c-b67b-93cea3b56ca7","Type":"ContainerDied","Data":"2de6eda61f3f00561f50c1d9d504a95ddad54a6f8cd6886437e5f18497c121f2"} Sep 30 10:02:06 crc kubenswrapper[4730]: I0930 10:02:06.238156 4730 scope.go:117] "RemoveContainer" containerID="82fd6408e8d281795129803e3ea26c13aef10ee065b8cdef3cba62d99ca559f7" Sep 30 10:02:06 crc kubenswrapper[4730]: I0930 10:02:06.238202 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-md87h" Sep 30 10:02:06 crc kubenswrapper[4730]: I0930 10:02:06.241732 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96r4bgv" event={"ID":"99980a45-f5ca-428d-b285-bc4f72ff8e28","Type":"ContainerStarted","Data":"deff53300fd9d810938dd652d42cdd27019de83fab7680b8a292490e41ae930e"} Sep 30 10:02:06 crc kubenswrapper[4730]: I0930 10:02:06.341869 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-md87h"] Sep 30 10:02:06 crc kubenswrapper[4730]: I0930 10:02:06.346677 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-f9d7485db-md87h"] Sep 30 10:02:06 crc kubenswrapper[4730]: I0930 10:02:06.391209 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3c83ea01-beb2-4b6c-b67b-93cea3b56ca7" path="/var/lib/kubelet/pods/3c83ea01-beb2-4b6c-b67b-93cea3b56ca7/volumes" Sep 30 10:02:07 crc kubenswrapper[4730]: I0930 10:02:07.249692 4730 generic.go:334] "Generic (PLEG): container finished" podID="99980a45-f5ca-428d-b285-bc4f72ff8e28" containerID="deff53300fd9d810938dd652d42cdd27019de83fab7680b8a292490e41ae930e" exitCode=0 Sep 30 10:02:07 crc kubenswrapper[4730]: I0930 10:02:07.249732 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96r4bgv" event={"ID":"99980a45-f5ca-428d-b285-bc4f72ff8e28","Type":"ContainerDied","Data":"deff53300fd9d810938dd652d42cdd27019de83fab7680b8a292490e41ae930e"} Sep 30 10:02:07 crc kubenswrapper[4730]: I0930 10:02:07.631000 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-cxgwv"] Sep 30 10:02:07 crc kubenswrapper[4730]: E0930 10:02:07.631530 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3c83ea01-beb2-4b6c-b67b-93cea3b56ca7" containerName="console" Sep 30 10:02:07 crc kubenswrapper[4730]: I0930 10:02:07.631542 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="3c83ea01-beb2-4b6c-b67b-93cea3b56ca7" containerName="console" Sep 30 10:02:07 crc kubenswrapper[4730]: I0930 10:02:07.631666 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="3c83ea01-beb2-4b6c-b67b-93cea3b56ca7" containerName="console" Sep 30 10:02:07 crc kubenswrapper[4730]: I0930 10:02:07.632553 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-cxgwv" Sep 30 10:02:07 crc kubenswrapper[4730]: I0930 10:02:07.642575 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-cxgwv"] Sep 30 10:02:07 crc kubenswrapper[4730]: I0930 10:02:07.767262 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a0490cc9-3d97-4a6e-a202-b1a1143faa84-utilities\") pod \"redhat-operators-cxgwv\" (UID: \"a0490cc9-3d97-4a6e-a202-b1a1143faa84\") " pod="openshift-marketplace/redhat-operators-cxgwv" Sep 30 10:02:07 crc kubenswrapper[4730]: I0930 10:02:07.767315 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v44dq\" (UniqueName: \"kubernetes.io/projected/a0490cc9-3d97-4a6e-a202-b1a1143faa84-kube-api-access-v44dq\") pod \"redhat-operators-cxgwv\" (UID: \"a0490cc9-3d97-4a6e-a202-b1a1143faa84\") " pod="openshift-marketplace/redhat-operators-cxgwv" Sep 30 10:02:07 crc kubenswrapper[4730]: I0930 10:02:07.767345 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a0490cc9-3d97-4a6e-a202-b1a1143faa84-catalog-content\") pod \"redhat-operators-cxgwv\" (UID: \"a0490cc9-3d97-4a6e-a202-b1a1143faa84\") " pod="openshift-marketplace/redhat-operators-cxgwv" Sep 30 10:02:07 crc kubenswrapper[4730]: I0930 10:02:07.868559 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a0490cc9-3d97-4a6e-a202-b1a1143faa84-catalog-content\") pod \"redhat-operators-cxgwv\" (UID: \"a0490cc9-3d97-4a6e-a202-b1a1143faa84\") " pod="openshift-marketplace/redhat-operators-cxgwv" Sep 30 10:02:07 crc kubenswrapper[4730]: I0930 10:02:07.868740 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a0490cc9-3d97-4a6e-a202-b1a1143faa84-utilities\") pod \"redhat-operators-cxgwv\" (UID: \"a0490cc9-3d97-4a6e-a202-b1a1143faa84\") " pod="openshift-marketplace/redhat-operators-cxgwv" Sep 30 10:02:07 crc kubenswrapper[4730]: I0930 10:02:07.868759 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v44dq\" (UniqueName: \"kubernetes.io/projected/a0490cc9-3d97-4a6e-a202-b1a1143faa84-kube-api-access-v44dq\") pod \"redhat-operators-cxgwv\" (UID: \"a0490cc9-3d97-4a6e-a202-b1a1143faa84\") " pod="openshift-marketplace/redhat-operators-cxgwv" Sep 30 10:02:07 crc kubenswrapper[4730]: I0930 10:02:07.869050 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a0490cc9-3d97-4a6e-a202-b1a1143faa84-catalog-content\") pod \"redhat-operators-cxgwv\" (UID: \"a0490cc9-3d97-4a6e-a202-b1a1143faa84\") " pod="openshift-marketplace/redhat-operators-cxgwv" Sep 30 10:02:07 crc kubenswrapper[4730]: I0930 10:02:07.869174 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a0490cc9-3d97-4a6e-a202-b1a1143faa84-utilities\") pod \"redhat-operators-cxgwv\" (UID: \"a0490cc9-3d97-4a6e-a202-b1a1143faa84\") " pod="openshift-marketplace/redhat-operators-cxgwv" Sep 30 10:02:07 crc kubenswrapper[4730]: I0930 10:02:07.888363 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-v44dq\" (UniqueName: \"kubernetes.io/projected/a0490cc9-3d97-4a6e-a202-b1a1143faa84-kube-api-access-v44dq\") pod \"redhat-operators-cxgwv\" (UID: \"a0490cc9-3d97-4a6e-a202-b1a1143faa84\") " pod="openshift-marketplace/redhat-operators-cxgwv" Sep 30 10:02:07 crc kubenswrapper[4730]: I0930 10:02:07.946857 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-cxgwv" Sep 30 10:02:08 crc kubenswrapper[4730]: I0930 10:02:08.260675 4730 generic.go:334] "Generic (PLEG): container finished" podID="99980a45-f5ca-428d-b285-bc4f72ff8e28" containerID="6f43299478d8b5f63e3a6cdb388d93009c0737877394b552922c9e1db3e9978a" exitCode=0 Sep 30 10:02:08 crc kubenswrapper[4730]: I0930 10:02:08.260891 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96r4bgv" event={"ID":"99980a45-f5ca-428d-b285-bc4f72ff8e28","Type":"ContainerDied","Data":"6f43299478d8b5f63e3a6cdb388d93009c0737877394b552922c9e1db3e9978a"} Sep 30 10:02:08 crc kubenswrapper[4730]: I0930 10:02:08.428709 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-cxgwv"] Sep 30 10:02:08 crc kubenswrapper[4730]: W0930 10:02:08.444053 4730 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda0490cc9_3d97_4a6e_a202_b1a1143faa84.slice/crio-7fae99f5c820c6498fbb3f0ab08a3e95838fe8382a33bb5774bff9cae791e2cd WatchSource:0}: Error finding container 7fae99f5c820c6498fbb3f0ab08a3e95838fe8382a33bb5774bff9cae791e2cd: Status 404 returned error can't find the container with id 7fae99f5c820c6498fbb3f0ab08a3e95838fe8382a33bb5774bff9cae791e2cd Sep 30 10:02:09 crc kubenswrapper[4730]: I0930 10:02:09.269905 4730 generic.go:334] "Generic (PLEG): container finished" podID="a0490cc9-3d97-4a6e-a202-b1a1143faa84" containerID="7e487fcf14b1f6ce664f2b176afd2255e2eb534b9fd3a812db80248d7231447f" exitCode=0 Sep 30 10:02:09 crc kubenswrapper[4730]: I0930 10:02:09.269992 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cxgwv" event={"ID":"a0490cc9-3d97-4a6e-a202-b1a1143faa84","Type":"ContainerDied","Data":"7e487fcf14b1f6ce664f2b176afd2255e2eb534b9fd3a812db80248d7231447f"} Sep 30 10:02:09 crc kubenswrapper[4730]: I0930 10:02:09.270315 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cxgwv" event={"ID":"a0490cc9-3d97-4a6e-a202-b1a1143faa84","Type":"ContainerStarted","Data":"7fae99f5c820c6498fbb3f0ab08a3e95838fe8382a33bb5774bff9cae791e2cd"} Sep 30 10:02:09 crc kubenswrapper[4730]: I0930 10:02:09.681190 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96r4bgv" Sep 30 10:02:09 crc kubenswrapper[4730]: I0930 10:02:09.791015 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/99980a45-f5ca-428d-b285-bc4f72ff8e28-bundle\") pod \"99980a45-f5ca-428d-b285-bc4f72ff8e28\" (UID: \"99980a45-f5ca-428d-b285-bc4f72ff8e28\") " Sep 30 10:02:09 crc kubenswrapper[4730]: I0930 10:02:09.791180 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6xdzn\" (UniqueName: \"kubernetes.io/projected/99980a45-f5ca-428d-b285-bc4f72ff8e28-kube-api-access-6xdzn\") pod \"99980a45-f5ca-428d-b285-bc4f72ff8e28\" (UID: \"99980a45-f5ca-428d-b285-bc4f72ff8e28\") " Sep 30 10:02:09 crc kubenswrapper[4730]: I0930 10:02:09.791209 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/99980a45-f5ca-428d-b285-bc4f72ff8e28-util\") pod \"99980a45-f5ca-428d-b285-bc4f72ff8e28\" (UID: \"99980a45-f5ca-428d-b285-bc4f72ff8e28\") " Sep 30 10:02:09 crc kubenswrapper[4730]: I0930 10:02:09.792003 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/99980a45-f5ca-428d-b285-bc4f72ff8e28-bundle" (OuterVolumeSpecName: "bundle") pod "99980a45-f5ca-428d-b285-bc4f72ff8e28" (UID: "99980a45-f5ca-428d-b285-bc4f72ff8e28"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 10:02:09 crc kubenswrapper[4730]: I0930 10:02:09.799805 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/99980a45-f5ca-428d-b285-bc4f72ff8e28-kube-api-access-6xdzn" (OuterVolumeSpecName: "kube-api-access-6xdzn") pod "99980a45-f5ca-428d-b285-bc4f72ff8e28" (UID: "99980a45-f5ca-428d-b285-bc4f72ff8e28"). InnerVolumeSpecName "kube-api-access-6xdzn". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:02:09 crc kubenswrapper[4730]: I0930 10:02:09.802681 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/99980a45-f5ca-428d-b285-bc4f72ff8e28-util" (OuterVolumeSpecName: "util") pod "99980a45-f5ca-428d-b285-bc4f72ff8e28" (UID: "99980a45-f5ca-428d-b285-bc4f72ff8e28"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 10:02:09 crc kubenswrapper[4730]: I0930 10:02:09.892627 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6xdzn\" (UniqueName: \"kubernetes.io/projected/99980a45-f5ca-428d-b285-bc4f72ff8e28-kube-api-access-6xdzn\") on node \"crc\" DevicePath \"\"" Sep 30 10:02:09 crc kubenswrapper[4730]: I0930 10:02:09.892663 4730 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/99980a45-f5ca-428d-b285-bc4f72ff8e28-util\") on node \"crc\" DevicePath \"\"" Sep 30 10:02:09 crc kubenswrapper[4730]: I0930 10:02:09.892678 4730 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/99980a45-f5ca-428d-b285-bc4f72ff8e28-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 10:02:10 crc kubenswrapper[4730]: I0930 10:02:10.279063 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96r4bgv" event={"ID":"99980a45-f5ca-428d-b285-bc4f72ff8e28","Type":"ContainerDied","Data":"3673f8dd6d8d4fba19fdbcc1126c552d8adc980e67edb12277ccc968be85a18b"} Sep 30 10:02:10 crc kubenswrapper[4730]: I0930 10:02:10.279421 4730 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3673f8dd6d8d4fba19fdbcc1126c552d8adc980e67edb12277ccc968be85a18b" Sep 30 10:02:10 crc kubenswrapper[4730]: I0930 10:02:10.279108 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96r4bgv" Sep 30 10:02:10 crc kubenswrapper[4730]: I0930 10:02:10.283160 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cxgwv" event={"ID":"a0490cc9-3d97-4a6e-a202-b1a1143faa84","Type":"ContainerStarted","Data":"bbe5d088c078beff851eb13701182a063f724e97b9130f8b78b239227a4200f5"} Sep 30 10:02:11 crc kubenswrapper[4730]: I0930 10:02:11.358073 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-mdfs6" Sep 30 10:02:11 crc kubenswrapper[4730]: I0930 10:02:11.358126 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-mdfs6" Sep 30 10:02:11 crc kubenswrapper[4730]: I0930 10:02:11.435432 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-mdfs6" Sep 30 10:02:12 crc kubenswrapper[4730]: I0930 10:02:12.294526 4730 generic.go:334] "Generic (PLEG): container finished" podID="a0490cc9-3d97-4a6e-a202-b1a1143faa84" containerID="bbe5d088c078beff851eb13701182a063f724e97b9130f8b78b239227a4200f5" exitCode=0 Sep 30 10:02:12 crc kubenswrapper[4730]: I0930 10:02:12.294624 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cxgwv" event={"ID":"a0490cc9-3d97-4a6e-a202-b1a1143faa84","Type":"ContainerDied","Data":"bbe5d088c078beff851eb13701182a063f724e97b9130f8b78b239227a4200f5"} Sep 30 10:02:12 crc kubenswrapper[4730]: I0930 10:02:12.348185 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-mdfs6" Sep 30 10:02:13 crc kubenswrapper[4730]: I0930 10:02:13.302130 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cxgwv" 
event={"ID":"a0490cc9-3d97-4a6e-a202-b1a1143faa84","Type":"ContainerStarted","Data":"1921a193b668cc44a56cf32afc0ef1a62c3d549295981e6003fa9872891df754"} Sep 30 10:02:14 crc kubenswrapper[4730]: I0930 10:02:14.626606 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-cxgwv" podStartSLOduration=3.904648769 podStartE2EDuration="7.626585582s" podCreationTimestamp="2025-09-30 10:02:07 +0000 UTC" firstStartedPulling="2025-09-30 10:02:09.271991494 +0000 UTC m=+773.605251487" lastFinishedPulling="2025-09-30 10:02:12.993928307 +0000 UTC m=+777.327188300" observedRunningTime="2025-09-30 10:02:13.330688514 +0000 UTC m=+777.663948527" watchObservedRunningTime="2025-09-30 10:02:14.626585582 +0000 UTC m=+778.959845575" Sep 30 10:02:14 crc kubenswrapper[4730]: I0930 10:02:14.627054 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-mdfs6"] Sep 30 10:02:14 crc kubenswrapper[4730]: I0930 10:02:14.627278 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-mdfs6" podUID="1ad8281f-ae5e-4f60-8915-da0ca06e66c1" containerName="registry-server" containerID="cri-o://2dc6297caf81c1009db1c48c8006496c9cdae1deb9e637dd78c4ea326eb83d32" gracePeriod=2 Sep 30 10:02:15 crc kubenswrapper[4730]: I0930 10:02:15.250908 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mdfs6" Sep 30 10:02:15 crc kubenswrapper[4730]: I0930 10:02:15.315044 4730 generic.go:334] "Generic (PLEG): container finished" podID="1ad8281f-ae5e-4f60-8915-da0ca06e66c1" containerID="2dc6297caf81c1009db1c48c8006496c9cdae1deb9e637dd78c4ea326eb83d32" exitCode=0 Sep 30 10:02:15 crc kubenswrapper[4730]: I0930 10:02:15.315092 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mdfs6" event={"ID":"1ad8281f-ae5e-4f60-8915-da0ca06e66c1","Type":"ContainerDied","Data":"2dc6297caf81c1009db1c48c8006496c9cdae1deb9e637dd78c4ea326eb83d32"} Sep 30 10:02:15 crc kubenswrapper[4730]: I0930 10:02:15.315127 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mdfs6" event={"ID":"1ad8281f-ae5e-4f60-8915-da0ca06e66c1","Type":"ContainerDied","Data":"e19764377a0297533749d5139b0f029eb60c46afb94a46febc625a4fbd884741"} Sep 30 10:02:15 crc kubenswrapper[4730]: I0930 10:02:15.315116 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mdfs6" Sep 30 10:02:15 crc kubenswrapper[4730]: I0930 10:02:15.315147 4730 scope.go:117] "RemoveContainer" containerID="2dc6297caf81c1009db1c48c8006496c9cdae1deb9e637dd78c4ea326eb83d32" Sep 30 10:02:15 crc kubenswrapper[4730]: I0930 10:02:15.331293 4730 scope.go:117] "RemoveContainer" containerID="2aa8446cffebf64a31583109fa70d4ede7038309858a643f10fc04ee625cc2f4" Sep 30 10:02:15 crc kubenswrapper[4730]: I0930 10:02:15.353761 4730 scope.go:117] "RemoveContainer" containerID="ac9ce89ab20c918d42547e41eb262a002df1793d6b4767e5b729fbf7440cdb5b" Sep 30 10:02:15 crc kubenswrapper[4730]: I0930 10:02:15.372180 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1ad8281f-ae5e-4f60-8915-da0ca06e66c1-catalog-content\") pod \"1ad8281f-ae5e-4f60-8915-da0ca06e66c1\" (UID: \"1ad8281f-ae5e-4f60-8915-da0ca06e66c1\") " Sep 30 10:02:15 crc kubenswrapper[4730]: I0930 10:02:15.372231 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1ad8281f-ae5e-4f60-8915-da0ca06e66c1-utilities\") pod \"1ad8281f-ae5e-4f60-8915-da0ca06e66c1\" (UID: \"1ad8281f-ae5e-4f60-8915-da0ca06e66c1\") " Sep 30 10:02:15 crc kubenswrapper[4730]: I0930 10:02:15.372262 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-56d7b\" (UniqueName: \"kubernetes.io/projected/1ad8281f-ae5e-4f60-8915-da0ca06e66c1-kube-api-access-56d7b\") pod \"1ad8281f-ae5e-4f60-8915-da0ca06e66c1\" (UID: \"1ad8281f-ae5e-4f60-8915-da0ca06e66c1\") " Sep 30 10:02:15 crc kubenswrapper[4730]: I0930 10:02:15.373211 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1ad8281f-ae5e-4f60-8915-da0ca06e66c1-utilities" (OuterVolumeSpecName: "utilities") pod "1ad8281f-ae5e-4f60-8915-da0ca06e66c1" (UID: "1ad8281f-ae5e-4f60-8915-da0ca06e66c1"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 10:02:15 crc kubenswrapper[4730]: I0930 10:02:15.373390 4730 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1ad8281f-ae5e-4f60-8915-da0ca06e66c1-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 10:02:15 crc kubenswrapper[4730]: I0930 10:02:15.380217 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1ad8281f-ae5e-4f60-8915-da0ca06e66c1-kube-api-access-56d7b" (OuterVolumeSpecName: "kube-api-access-56d7b") pod "1ad8281f-ae5e-4f60-8915-da0ca06e66c1" (UID: "1ad8281f-ae5e-4f60-8915-da0ca06e66c1"). InnerVolumeSpecName "kube-api-access-56d7b". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:02:15 crc kubenswrapper[4730]: I0930 10:02:15.391772 4730 scope.go:117] "RemoveContainer" containerID="2dc6297caf81c1009db1c48c8006496c9cdae1deb9e637dd78c4ea326eb83d32" Sep 30 10:02:15 crc kubenswrapper[4730]: E0930 10:02:15.392288 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2dc6297caf81c1009db1c48c8006496c9cdae1deb9e637dd78c4ea326eb83d32\": container with ID starting with 2dc6297caf81c1009db1c48c8006496c9cdae1deb9e637dd78c4ea326eb83d32 not found: ID does not exist" containerID="2dc6297caf81c1009db1c48c8006496c9cdae1deb9e637dd78c4ea326eb83d32" Sep 30 10:02:15 crc kubenswrapper[4730]: I0930 10:02:15.392318 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2dc6297caf81c1009db1c48c8006496c9cdae1deb9e637dd78c4ea326eb83d32"} err="failed to get container status \"2dc6297caf81c1009db1c48c8006496c9cdae1deb9e637dd78c4ea326eb83d32\": rpc error: code = NotFound desc = could not find container \"2dc6297caf81c1009db1c48c8006496c9cdae1deb9e637dd78c4ea326eb83d32\": container with ID starting with 2dc6297caf81c1009db1c48c8006496c9cdae1deb9e637dd78c4ea326eb83d32 not found: ID does not exist" Sep 30 10:02:15 crc kubenswrapper[4730]: I0930 10:02:15.392339 4730 scope.go:117] "RemoveContainer" containerID="2aa8446cffebf64a31583109fa70d4ede7038309858a643f10fc04ee625cc2f4" Sep 30 10:02:15 crc kubenswrapper[4730]: E0930 10:02:15.392519 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2aa8446cffebf64a31583109fa70d4ede7038309858a643f10fc04ee625cc2f4\": container with ID starting with 2aa8446cffebf64a31583109fa70d4ede7038309858a643f10fc04ee625cc2f4 not found: ID does not exist" containerID="2aa8446cffebf64a31583109fa70d4ede7038309858a643f10fc04ee625cc2f4" Sep 30 10:02:15 crc kubenswrapper[4730]: I0930 10:02:15.392541 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2aa8446cffebf64a31583109fa70d4ede7038309858a643f10fc04ee625cc2f4"} err="failed to get container status \"2aa8446cffebf64a31583109fa70d4ede7038309858a643f10fc04ee625cc2f4\": rpc error: code = NotFound desc = could not find container \"2aa8446cffebf64a31583109fa70d4ede7038309858a643f10fc04ee625cc2f4\": container with ID starting with 2aa8446cffebf64a31583109fa70d4ede7038309858a643f10fc04ee625cc2f4 not found: ID does not exist" Sep 30 10:02:15 crc kubenswrapper[4730]: I0930 10:02:15.392555 4730 scope.go:117] "RemoveContainer" containerID="ac9ce89ab20c918d42547e41eb262a002df1793d6b4767e5b729fbf7440cdb5b" Sep 30 10:02:15 crc kubenswrapper[4730]: E0930 10:02:15.392722 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ac9ce89ab20c918d42547e41eb262a002df1793d6b4767e5b729fbf7440cdb5b\": container with ID starting with ac9ce89ab20c918d42547e41eb262a002df1793d6b4767e5b729fbf7440cdb5b not found: ID does not exist" containerID="ac9ce89ab20c918d42547e41eb262a002df1793d6b4767e5b729fbf7440cdb5b" Sep 30 10:02:15 crc kubenswrapper[4730]: I0930 10:02:15.392744 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ac9ce89ab20c918d42547e41eb262a002df1793d6b4767e5b729fbf7440cdb5b"} err="failed to get container status \"ac9ce89ab20c918d42547e41eb262a002df1793d6b4767e5b729fbf7440cdb5b\": rpc error: code = NotFound desc = could not 
find container \"ac9ce89ab20c918d42547e41eb262a002df1793d6b4767e5b729fbf7440cdb5b\": container with ID starting with ac9ce89ab20c918d42547e41eb262a002df1793d6b4767e5b729fbf7440cdb5b not found: ID does not exist" Sep 30 10:02:15 crc kubenswrapper[4730]: I0930 10:02:15.395771 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1ad8281f-ae5e-4f60-8915-da0ca06e66c1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1ad8281f-ae5e-4f60-8915-da0ca06e66c1" (UID: "1ad8281f-ae5e-4f60-8915-da0ca06e66c1"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 10:02:15 crc kubenswrapper[4730]: I0930 10:02:15.474927 4730 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1ad8281f-ae5e-4f60-8915-da0ca06e66c1-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 10:02:15 crc kubenswrapper[4730]: I0930 10:02:15.474966 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-56d7b\" (UniqueName: \"kubernetes.io/projected/1ad8281f-ae5e-4f60-8915-da0ca06e66c1-kube-api-access-56d7b\") on node \"crc\" DevicePath \"\"" Sep 30 10:02:15 crc kubenswrapper[4730]: I0930 10:02:15.638747 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-mdfs6"] Sep 30 10:02:15 crc kubenswrapper[4730]: I0930 10:02:15.642641 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-mdfs6"] Sep 30 10:02:16 crc kubenswrapper[4730]: I0930 10:02:16.388288 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1ad8281f-ae5e-4f60-8915-da0ca06e66c1" path="/var/lib/kubelet/pods/1ad8281f-ae5e-4f60-8915-da0ca06e66c1/volumes" Sep 30 10:02:17 crc kubenswrapper[4730]: I0930 10:02:17.947977 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-cxgwv" Sep 30 10:02:17 crc kubenswrapper[4730]: I0930 10:02:17.948440 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-cxgwv" Sep 30 10:02:18 crc kubenswrapper[4730]: I0930 10:02:18.393361 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-574c858f4-cvlx7"] Sep 30 10:02:18 crc kubenswrapper[4730]: E0930 10:02:18.393704 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1ad8281f-ae5e-4f60-8915-da0ca06e66c1" containerName="extract-utilities" Sep 30 10:02:18 crc kubenswrapper[4730]: I0930 10:02:18.393728 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="1ad8281f-ae5e-4f60-8915-da0ca06e66c1" containerName="extract-utilities" Sep 30 10:02:18 crc kubenswrapper[4730]: E0930 10:02:18.393740 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="99980a45-f5ca-428d-b285-bc4f72ff8e28" containerName="pull" Sep 30 10:02:18 crc kubenswrapper[4730]: I0930 10:02:18.393751 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="99980a45-f5ca-428d-b285-bc4f72ff8e28" containerName="pull" Sep 30 10:02:18 crc kubenswrapper[4730]: E0930 10:02:18.393774 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="99980a45-f5ca-428d-b285-bc4f72ff8e28" containerName="extract" Sep 30 10:02:18 crc kubenswrapper[4730]: I0930 10:02:18.393783 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="99980a45-f5ca-428d-b285-bc4f72ff8e28" containerName="extract" Sep 30 10:02:18 crc 
kubenswrapper[4730]: E0930 10:02:18.393800 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="99980a45-f5ca-428d-b285-bc4f72ff8e28" containerName="util" Sep 30 10:02:18 crc kubenswrapper[4730]: I0930 10:02:18.393808 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="99980a45-f5ca-428d-b285-bc4f72ff8e28" containerName="util" Sep 30 10:02:18 crc kubenswrapper[4730]: E0930 10:02:18.393839 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1ad8281f-ae5e-4f60-8915-da0ca06e66c1" containerName="extract-content" Sep 30 10:02:18 crc kubenswrapper[4730]: I0930 10:02:18.393848 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="1ad8281f-ae5e-4f60-8915-da0ca06e66c1" containerName="extract-content" Sep 30 10:02:18 crc kubenswrapper[4730]: E0930 10:02:18.393860 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1ad8281f-ae5e-4f60-8915-da0ca06e66c1" containerName="registry-server" Sep 30 10:02:18 crc kubenswrapper[4730]: I0930 10:02:18.393867 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="1ad8281f-ae5e-4f60-8915-da0ca06e66c1" containerName="registry-server" Sep 30 10:02:18 crc kubenswrapper[4730]: I0930 10:02:18.394006 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="1ad8281f-ae5e-4f60-8915-da0ca06e66c1" containerName="registry-server" Sep 30 10:02:18 crc kubenswrapper[4730]: I0930 10:02:18.394018 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="99980a45-f5ca-428d-b285-bc4f72ff8e28" containerName="extract" Sep 30 10:02:18 crc kubenswrapper[4730]: I0930 10:02:18.394552 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-574c858f4-cvlx7" Sep 30 10:02:18 crc kubenswrapper[4730]: I0930 10:02:18.397690 4730 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert" Sep 30 10:02:18 crc kubenswrapper[4730]: I0930 10:02:18.397993 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt" Sep 30 10:02:18 crc kubenswrapper[4730]: I0930 10:02:18.398508 4730 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-n9bkj" Sep 30 10:02:18 crc kubenswrapper[4730]: I0930 10:02:18.398947 4730 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert" Sep 30 10:02:18 crc kubenswrapper[4730]: I0930 10:02:18.399164 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt" Sep 30 10:02:18 crc kubenswrapper[4730]: I0930 10:02:18.406832 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-574c858f4-cvlx7"] Sep 30 10:02:18 crc kubenswrapper[4730]: I0930 10:02:18.517602 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c6vl4\" (UniqueName: \"kubernetes.io/projected/b7580b03-c29b-4b03-84c7-726fecd55064-kube-api-access-c6vl4\") pod \"metallb-operator-controller-manager-574c858f4-cvlx7\" (UID: \"b7580b03-c29b-4b03-84c7-726fecd55064\") " pod="metallb-system/metallb-operator-controller-manager-574c858f4-cvlx7" Sep 30 10:02:18 crc kubenswrapper[4730]: I0930 10:02:18.517681 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: 
\"kubernetes.io/secret/b7580b03-c29b-4b03-84c7-726fecd55064-webhook-cert\") pod \"metallb-operator-controller-manager-574c858f4-cvlx7\" (UID: \"b7580b03-c29b-4b03-84c7-726fecd55064\") " pod="metallb-system/metallb-operator-controller-manager-574c858f4-cvlx7" Sep 30 10:02:18 crc kubenswrapper[4730]: I0930 10:02:18.517713 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/b7580b03-c29b-4b03-84c7-726fecd55064-apiservice-cert\") pod \"metallb-operator-controller-manager-574c858f4-cvlx7\" (UID: \"b7580b03-c29b-4b03-84c7-726fecd55064\") " pod="metallb-system/metallb-operator-controller-manager-574c858f4-cvlx7" Sep 30 10:02:18 crc kubenswrapper[4730]: I0930 10:02:18.618726 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c6vl4\" (UniqueName: \"kubernetes.io/projected/b7580b03-c29b-4b03-84c7-726fecd55064-kube-api-access-c6vl4\") pod \"metallb-operator-controller-manager-574c858f4-cvlx7\" (UID: \"b7580b03-c29b-4b03-84c7-726fecd55064\") " pod="metallb-system/metallb-operator-controller-manager-574c858f4-cvlx7" Sep 30 10:02:18 crc kubenswrapper[4730]: I0930 10:02:18.618785 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/b7580b03-c29b-4b03-84c7-726fecd55064-webhook-cert\") pod \"metallb-operator-controller-manager-574c858f4-cvlx7\" (UID: \"b7580b03-c29b-4b03-84c7-726fecd55064\") " pod="metallb-system/metallb-operator-controller-manager-574c858f4-cvlx7" Sep 30 10:02:18 crc kubenswrapper[4730]: I0930 10:02:18.618815 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/b7580b03-c29b-4b03-84c7-726fecd55064-apiservice-cert\") pod \"metallb-operator-controller-manager-574c858f4-cvlx7\" (UID: \"b7580b03-c29b-4b03-84c7-726fecd55064\") " pod="metallb-system/metallb-operator-controller-manager-574c858f4-cvlx7" Sep 30 10:02:18 crc kubenswrapper[4730]: I0930 10:02:18.627835 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/b7580b03-c29b-4b03-84c7-726fecd55064-apiservice-cert\") pod \"metallb-operator-controller-manager-574c858f4-cvlx7\" (UID: \"b7580b03-c29b-4b03-84c7-726fecd55064\") " pod="metallb-system/metallb-operator-controller-manager-574c858f4-cvlx7" Sep 30 10:02:18 crc kubenswrapper[4730]: I0930 10:02:18.637254 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/b7580b03-c29b-4b03-84c7-726fecd55064-webhook-cert\") pod \"metallb-operator-controller-manager-574c858f4-cvlx7\" (UID: \"b7580b03-c29b-4b03-84c7-726fecd55064\") " pod="metallb-system/metallb-operator-controller-manager-574c858f4-cvlx7" Sep 30 10:02:18 crc kubenswrapper[4730]: I0930 10:02:18.641199 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c6vl4\" (UniqueName: \"kubernetes.io/projected/b7580b03-c29b-4b03-84c7-726fecd55064-kube-api-access-c6vl4\") pod \"metallb-operator-controller-manager-574c858f4-cvlx7\" (UID: \"b7580b03-c29b-4b03-84c7-726fecd55064\") " pod="metallb-system/metallb-operator-controller-manager-574c858f4-cvlx7" Sep 30 10:02:18 crc kubenswrapper[4730]: I0930 10:02:18.711378 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-574c858f4-cvlx7" Sep 30 10:02:18 crc kubenswrapper[4730]: I0930 10:02:18.822071 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-666698b878-dkzwg"] Sep 30 10:02:18 crc kubenswrapper[4730]: I0930 10:02:18.822950 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-666698b878-dkzwg" Sep 30 10:02:18 crc kubenswrapper[4730]: I0930 10:02:18.825134 4730 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-qb49p" Sep 30 10:02:18 crc kubenswrapper[4730]: I0930 10:02:18.825184 4730 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert" Sep 30 10:02:18 crc kubenswrapper[4730]: I0930 10:02:18.826060 4730 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Sep 30 10:02:18 crc kubenswrapper[4730]: I0930 10:02:18.848101 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-666698b878-dkzwg"] Sep 30 10:02:18 crc kubenswrapper[4730]: I0930 10:02:18.923402 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/5c1b3278-f9f8-41c3-a42f-d789aaaba651-apiservice-cert\") pod \"metallb-operator-webhook-server-666698b878-dkzwg\" (UID: \"5c1b3278-f9f8-41c3-a42f-d789aaaba651\") " pod="metallb-system/metallb-operator-webhook-server-666698b878-dkzwg" Sep 30 10:02:18 crc kubenswrapper[4730]: I0930 10:02:18.923455 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mj57c\" (UniqueName: \"kubernetes.io/projected/5c1b3278-f9f8-41c3-a42f-d789aaaba651-kube-api-access-mj57c\") pod \"metallb-operator-webhook-server-666698b878-dkzwg\" (UID: \"5c1b3278-f9f8-41c3-a42f-d789aaaba651\") " pod="metallb-system/metallb-operator-webhook-server-666698b878-dkzwg" Sep 30 10:02:18 crc kubenswrapper[4730]: I0930 10:02:18.923512 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/5c1b3278-f9f8-41c3-a42f-d789aaaba651-webhook-cert\") pod \"metallb-operator-webhook-server-666698b878-dkzwg\" (UID: \"5c1b3278-f9f8-41c3-a42f-d789aaaba651\") " pod="metallb-system/metallb-operator-webhook-server-666698b878-dkzwg" Sep 30 10:02:18 crc kubenswrapper[4730]: I0930 10:02:18.990789 4730 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-cxgwv" podUID="a0490cc9-3d97-4a6e-a202-b1a1143faa84" containerName="registry-server" probeResult="failure" output=< Sep 30 10:02:18 crc kubenswrapper[4730]: timeout: failed to connect service ":50051" within 1s Sep 30 10:02:18 crc kubenswrapper[4730]: > Sep 30 10:02:19 crc kubenswrapper[4730]: I0930 10:02:19.024956 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/5c1b3278-f9f8-41c3-a42f-d789aaaba651-webhook-cert\") pod \"metallb-operator-webhook-server-666698b878-dkzwg\" (UID: \"5c1b3278-f9f8-41c3-a42f-d789aaaba651\") " pod="metallb-system/metallb-operator-webhook-server-666698b878-dkzwg" Sep 30 10:02:19 crc kubenswrapper[4730]: I0930 10:02:19.025077 4730 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/5c1b3278-f9f8-41c3-a42f-d789aaaba651-apiservice-cert\") pod \"metallb-operator-webhook-server-666698b878-dkzwg\" (UID: \"5c1b3278-f9f8-41c3-a42f-d789aaaba651\") " pod="metallb-system/metallb-operator-webhook-server-666698b878-dkzwg" Sep 30 10:02:19 crc kubenswrapper[4730]: I0930 10:02:19.025106 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mj57c\" (UniqueName: \"kubernetes.io/projected/5c1b3278-f9f8-41c3-a42f-d789aaaba651-kube-api-access-mj57c\") pod \"metallb-operator-webhook-server-666698b878-dkzwg\" (UID: \"5c1b3278-f9f8-41c3-a42f-d789aaaba651\") " pod="metallb-system/metallb-operator-webhook-server-666698b878-dkzwg" Sep 30 10:02:19 crc kubenswrapper[4730]: I0930 10:02:19.030133 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/5c1b3278-f9f8-41c3-a42f-d789aaaba651-webhook-cert\") pod \"metallb-operator-webhook-server-666698b878-dkzwg\" (UID: \"5c1b3278-f9f8-41c3-a42f-d789aaaba651\") " pod="metallb-system/metallb-operator-webhook-server-666698b878-dkzwg" Sep 30 10:02:19 crc kubenswrapper[4730]: I0930 10:02:19.045319 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/5c1b3278-f9f8-41c3-a42f-d789aaaba651-apiservice-cert\") pod \"metallb-operator-webhook-server-666698b878-dkzwg\" (UID: \"5c1b3278-f9f8-41c3-a42f-d789aaaba651\") " pod="metallb-system/metallb-operator-webhook-server-666698b878-dkzwg" Sep 30 10:02:19 crc kubenswrapper[4730]: I0930 10:02:19.053007 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mj57c\" (UniqueName: \"kubernetes.io/projected/5c1b3278-f9f8-41c3-a42f-d789aaaba651-kube-api-access-mj57c\") pod \"metallb-operator-webhook-server-666698b878-dkzwg\" (UID: \"5c1b3278-f9f8-41c3-a42f-d789aaaba651\") " pod="metallb-system/metallb-operator-webhook-server-666698b878-dkzwg" Sep 30 10:02:19 crc kubenswrapper[4730]: I0930 10:02:19.176681 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-666698b878-dkzwg" Sep 30 10:02:19 crc kubenswrapper[4730]: I0930 10:02:19.251498 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-574c858f4-cvlx7"] Sep 30 10:02:19 crc kubenswrapper[4730]: W0930 10:02:19.261913 4730 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb7580b03_c29b_4b03_84c7_726fecd55064.slice/crio-c407407e822e7d9420c9d821766bcc5b1003838495a7be834bb99781712acf74 WatchSource:0}: Error finding container c407407e822e7d9420c9d821766bcc5b1003838495a7be834bb99781712acf74: Status 404 returned error can't find the container with id c407407e822e7d9420c9d821766bcc5b1003838495a7be834bb99781712acf74 Sep 30 10:02:19 crc kubenswrapper[4730]: I0930 10:02:19.339132 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-574c858f4-cvlx7" event={"ID":"b7580b03-c29b-4b03-84c7-726fecd55064","Type":"ContainerStarted","Data":"c407407e822e7d9420c9d821766bcc5b1003838495a7be834bb99781712acf74"} Sep 30 10:02:19 crc kubenswrapper[4730]: I0930 10:02:19.622603 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-666698b878-dkzwg"] Sep 30 10:02:20 crc kubenswrapper[4730]: I0930 10:02:20.351668 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-666698b878-dkzwg" event={"ID":"5c1b3278-f9f8-41c3-a42f-d789aaaba651","Type":"ContainerStarted","Data":"f9b69d92267376e3cd319f32970af4b872e9f39bf37a0f8df74d0968e9dd33dd"} Sep 30 10:02:25 crc kubenswrapper[4730]: I0930 10:02:25.380941 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-666698b878-dkzwg" event={"ID":"5c1b3278-f9f8-41c3-a42f-d789aaaba651","Type":"ContainerStarted","Data":"b993e244cb920e60bb644ef446f60eece88a8799f895cf9ded3b1e954241137d"} Sep 30 10:02:25 crc kubenswrapper[4730]: I0930 10:02:25.381459 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-666698b878-dkzwg" Sep 30 10:02:25 crc kubenswrapper[4730]: I0930 10:02:25.383066 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-574c858f4-cvlx7" event={"ID":"b7580b03-c29b-4b03-84c7-726fecd55064","Type":"ContainerStarted","Data":"abbaf2183c7297a3a778cbf4d7933fdd619fd2ea5c27dee583b4360da7bc3b9f"} Sep 30 10:02:25 crc kubenswrapper[4730]: I0930 10:02:25.383201 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-controller-manager-574c858f4-cvlx7" Sep 30 10:02:25 crc kubenswrapper[4730]: I0930 10:02:25.404071 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-666698b878-dkzwg" podStartSLOduration=1.914712231 podStartE2EDuration="7.404052387s" podCreationTimestamp="2025-09-30 10:02:18 +0000 UTC" firstStartedPulling="2025-09-30 10:02:19.636292881 +0000 UTC m=+783.969552874" lastFinishedPulling="2025-09-30 10:02:25.125633037 +0000 UTC m=+789.458893030" observedRunningTime="2025-09-30 10:02:25.398042037 +0000 UTC m=+789.731302040" watchObservedRunningTime="2025-09-30 10:02:25.404052387 +0000 UTC m=+789.737312380" Sep 30 10:02:25 crc kubenswrapper[4730]: I0930 10:02:25.423560 4730 pod_startup_latency_tracker.go:104] "Observed pod 
startup duration" pod="metallb-system/metallb-operator-controller-manager-574c858f4-cvlx7" podStartSLOduration=1.578234691 podStartE2EDuration="7.423542191s" podCreationTimestamp="2025-09-30 10:02:18 +0000 UTC" firstStartedPulling="2025-09-30 10:02:19.264800256 +0000 UTC m=+783.598060249" lastFinishedPulling="2025-09-30 10:02:25.110107756 +0000 UTC m=+789.443367749" observedRunningTime="2025-09-30 10:02:25.420152465 +0000 UTC m=+789.753412468" watchObservedRunningTime="2025-09-30 10:02:25.423542191 +0000 UTC m=+789.756802184" Sep 30 10:02:27 crc kubenswrapper[4730]: I0930 10:02:27.987189 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-cxgwv" Sep 30 10:02:28 crc kubenswrapper[4730]: I0930 10:02:28.043035 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-cxgwv" Sep 30 10:02:28 crc kubenswrapper[4730]: I0930 10:02:28.214879 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-cxgwv"] Sep 30 10:02:29 crc kubenswrapper[4730]: I0930 10:02:29.406034 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-cxgwv" podUID="a0490cc9-3d97-4a6e-a202-b1a1143faa84" containerName="registry-server" containerID="cri-o://1921a193b668cc44a56cf32afc0ef1a62c3d549295981e6003fa9872891df754" gracePeriod=2 Sep 30 10:02:30 crc kubenswrapper[4730]: I0930 10:02:30.267057 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-cxgwv" Sep 30 10:02:30 crc kubenswrapper[4730]: I0930 10:02:30.291327 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a0490cc9-3d97-4a6e-a202-b1a1143faa84-catalog-content\") pod \"a0490cc9-3d97-4a6e-a202-b1a1143faa84\" (UID: \"a0490cc9-3d97-4a6e-a202-b1a1143faa84\") " Sep 30 10:02:30 crc kubenswrapper[4730]: I0930 10:02:30.291456 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v44dq\" (UniqueName: \"kubernetes.io/projected/a0490cc9-3d97-4a6e-a202-b1a1143faa84-kube-api-access-v44dq\") pod \"a0490cc9-3d97-4a6e-a202-b1a1143faa84\" (UID: \"a0490cc9-3d97-4a6e-a202-b1a1143faa84\") " Sep 30 10:02:30 crc kubenswrapper[4730]: I0930 10:02:30.291576 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a0490cc9-3d97-4a6e-a202-b1a1143faa84-utilities\") pod \"a0490cc9-3d97-4a6e-a202-b1a1143faa84\" (UID: \"a0490cc9-3d97-4a6e-a202-b1a1143faa84\") " Sep 30 10:02:30 crc kubenswrapper[4730]: I0930 10:02:30.294515 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a0490cc9-3d97-4a6e-a202-b1a1143faa84-utilities" (OuterVolumeSpecName: "utilities") pod "a0490cc9-3d97-4a6e-a202-b1a1143faa84" (UID: "a0490cc9-3d97-4a6e-a202-b1a1143faa84"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 10:02:30 crc kubenswrapper[4730]: I0930 10:02:30.304525 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0490cc9-3d97-4a6e-a202-b1a1143faa84-kube-api-access-v44dq" (OuterVolumeSpecName: "kube-api-access-v44dq") pod "a0490cc9-3d97-4a6e-a202-b1a1143faa84" (UID: "a0490cc9-3d97-4a6e-a202-b1a1143faa84"). InnerVolumeSpecName "kube-api-access-v44dq". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:02:30 crc kubenswrapper[4730]: I0930 10:02:30.370172 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a0490cc9-3d97-4a6e-a202-b1a1143faa84-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a0490cc9-3d97-4a6e-a202-b1a1143faa84" (UID: "a0490cc9-3d97-4a6e-a202-b1a1143faa84"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 10:02:30 crc kubenswrapper[4730]: I0930 10:02:30.394911 4730 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a0490cc9-3d97-4a6e-a202-b1a1143faa84-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 10:02:30 crc kubenswrapper[4730]: I0930 10:02:30.394978 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v44dq\" (UniqueName: \"kubernetes.io/projected/a0490cc9-3d97-4a6e-a202-b1a1143faa84-kube-api-access-v44dq\") on node \"crc\" DevicePath \"\"" Sep 30 10:02:30 crc kubenswrapper[4730]: I0930 10:02:30.394992 4730 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a0490cc9-3d97-4a6e-a202-b1a1143faa84-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 10:02:30 crc kubenswrapper[4730]: I0930 10:02:30.419701 4730 generic.go:334] "Generic (PLEG): container finished" podID="a0490cc9-3d97-4a6e-a202-b1a1143faa84" containerID="1921a193b668cc44a56cf32afc0ef1a62c3d549295981e6003fa9872891df754" exitCode=0 Sep 30 10:02:30 crc kubenswrapper[4730]: I0930 10:02:30.419758 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cxgwv" event={"ID":"a0490cc9-3d97-4a6e-a202-b1a1143faa84","Type":"ContainerDied","Data":"1921a193b668cc44a56cf32afc0ef1a62c3d549295981e6003fa9872891df754"} Sep 30 10:02:30 crc kubenswrapper[4730]: I0930 10:02:30.419799 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cxgwv" event={"ID":"a0490cc9-3d97-4a6e-a202-b1a1143faa84","Type":"ContainerDied","Data":"7fae99f5c820c6498fbb3f0ab08a3e95838fe8382a33bb5774bff9cae791e2cd"} Sep 30 10:02:30 crc kubenswrapper[4730]: I0930 10:02:30.419819 4730 scope.go:117] "RemoveContainer" containerID="1921a193b668cc44a56cf32afc0ef1a62c3d549295981e6003fa9872891df754" Sep 30 10:02:30 crc kubenswrapper[4730]: I0930 10:02:30.419971 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-cxgwv" Sep 30 10:02:30 crc kubenswrapper[4730]: I0930 10:02:30.445551 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-cxgwv"] Sep 30 10:02:30 crc kubenswrapper[4730]: I0930 10:02:30.446249 4730 scope.go:117] "RemoveContainer" containerID="bbe5d088c078beff851eb13701182a063f724e97b9130f8b78b239227a4200f5" Sep 30 10:02:30 crc kubenswrapper[4730]: I0930 10:02:30.449594 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-cxgwv"] Sep 30 10:02:30 crc kubenswrapper[4730]: I0930 10:02:30.462849 4730 scope.go:117] "RemoveContainer" containerID="7e487fcf14b1f6ce664f2b176afd2255e2eb534b9fd3a812db80248d7231447f" Sep 30 10:02:30 crc kubenswrapper[4730]: I0930 10:02:30.489908 4730 scope.go:117] "RemoveContainer" containerID="1921a193b668cc44a56cf32afc0ef1a62c3d549295981e6003fa9872891df754" Sep 30 10:02:30 crc kubenswrapper[4730]: E0930 10:02:30.491250 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1921a193b668cc44a56cf32afc0ef1a62c3d549295981e6003fa9872891df754\": container with ID starting with 1921a193b668cc44a56cf32afc0ef1a62c3d549295981e6003fa9872891df754 not found: ID does not exist" containerID="1921a193b668cc44a56cf32afc0ef1a62c3d549295981e6003fa9872891df754" Sep 30 10:02:30 crc kubenswrapper[4730]: I0930 10:02:30.491298 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1921a193b668cc44a56cf32afc0ef1a62c3d549295981e6003fa9872891df754"} err="failed to get container status \"1921a193b668cc44a56cf32afc0ef1a62c3d549295981e6003fa9872891df754\": rpc error: code = NotFound desc = could not find container \"1921a193b668cc44a56cf32afc0ef1a62c3d549295981e6003fa9872891df754\": container with ID starting with 1921a193b668cc44a56cf32afc0ef1a62c3d549295981e6003fa9872891df754 not found: ID does not exist" Sep 30 10:02:30 crc kubenswrapper[4730]: I0930 10:02:30.491332 4730 scope.go:117] "RemoveContainer" containerID="bbe5d088c078beff851eb13701182a063f724e97b9130f8b78b239227a4200f5" Sep 30 10:02:30 crc kubenswrapper[4730]: E0930 10:02:30.492447 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bbe5d088c078beff851eb13701182a063f724e97b9130f8b78b239227a4200f5\": container with ID starting with bbe5d088c078beff851eb13701182a063f724e97b9130f8b78b239227a4200f5 not found: ID does not exist" containerID="bbe5d088c078beff851eb13701182a063f724e97b9130f8b78b239227a4200f5" Sep 30 10:02:30 crc kubenswrapper[4730]: I0930 10:02:30.492485 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bbe5d088c078beff851eb13701182a063f724e97b9130f8b78b239227a4200f5"} err="failed to get container status \"bbe5d088c078beff851eb13701182a063f724e97b9130f8b78b239227a4200f5\": rpc error: code = NotFound desc = could not find container \"bbe5d088c078beff851eb13701182a063f724e97b9130f8b78b239227a4200f5\": container with ID starting with bbe5d088c078beff851eb13701182a063f724e97b9130f8b78b239227a4200f5 not found: ID does not exist" Sep 30 10:02:30 crc kubenswrapper[4730]: I0930 10:02:30.492509 4730 scope.go:117] "RemoveContainer" containerID="7e487fcf14b1f6ce664f2b176afd2255e2eb534b9fd3a812db80248d7231447f" Sep 30 10:02:30 crc kubenswrapper[4730]: E0930 10:02:30.495538 4730 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"7e487fcf14b1f6ce664f2b176afd2255e2eb534b9fd3a812db80248d7231447f\": container with ID starting with 7e487fcf14b1f6ce664f2b176afd2255e2eb534b9fd3a812db80248d7231447f not found: ID does not exist" containerID="7e487fcf14b1f6ce664f2b176afd2255e2eb534b9fd3a812db80248d7231447f" Sep 30 10:02:30 crc kubenswrapper[4730]: I0930 10:02:30.495583 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7e487fcf14b1f6ce664f2b176afd2255e2eb534b9fd3a812db80248d7231447f"} err="failed to get container status \"7e487fcf14b1f6ce664f2b176afd2255e2eb534b9fd3a812db80248d7231447f\": rpc error: code = NotFound desc = could not find container \"7e487fcf14b1f6ce664f2b176afd2255e2eb534b9fd3a812db80248d7231447f\": container with ID starting with 7e487fcf14b1f6ce664f2b176afd2255e2eb534b9fd3a812db80248d7231447f not found: ID does not exist" Sep 30 10:02:32 crc kubenswrapper[4730]: I0930 10:02:32.387558 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0490cc9-3d97-4a6e-a202-b1a1143faa84" path="/var/lib/kubelet/pods/a0490cc9-3d97-4a6e-a202-b1a1143faa84/volumes" Sep 30 10:02:39 crc kubenswrapper[4730]: I0930 10:02:39.181374 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-666698b878-dkzwg" Sep 30 10:02:43 crc kubenswrapper[4730]: I0930 10:02:43.407784 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-cgfsm"] Sep 30 10:02:43 crc kubenswrapper[4730]: E0930 10:02:43.408324 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a0490cc9-3d97-4a6e-a202-b1a1143faa84" containerName="extract-content" Sep 30 10:02:43 crc kubenswrapper[4730]: I0930 10:02:43.408338 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="a0490cc9-3d97-4a6e-a202-b1a1143faa84" containerName="extract-content" Sep 30 10:02:43 crc kubenswrapper[4730]: E0930 10:02:43.408351 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a0490cc9-3d97-4a6e-a202-b1a1143faa84" containerName="extract-utilities" Sep 30 10:02:43 crc kubenswrapper[4730]: I0930 10:02:43.408359 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="a0490cc9-3d97-4a6e-a202-b1a1143faa84" containerName="extract-utilities" Sep 30 10:02:43 crc kubenswrapper[4730]: E0930 10:02:43.408373 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a0490cc9-3d97-4a6e-a202-b1a1143faa84" containerName="registry-server" Sep 30 10:02:43 crc kubenswrapper[4730]: I0930 10:02:43.408382 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="a0490cc9-3d97-4a6e-a202-b1a1143faa84" containerName="registry-server" Sep 30 10:02:43 crc kubenswrapper[4730]: I0930 10:02:43.408511 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="a0490cc9-3d97-4a6e-a202-b1a1143faa84" containerName="registry-server" Sep 30 10:02:43 crc kubenswrapper[4730]: I0930 10:02:43.409503 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-cgfsm" Sep 30 10:02:43 crc kubenswrapper[4730]: I0930 10:02:43.418655 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-cgfsm"] Sep 30 10:02:43 crc kubenswrapper[4730]: I0930 10:02:43.561011 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/05e9d108-9923-450e-b0cb-9cf6a5bc4a54-catalog-content\") pod \"certified-operators-cgfsm\" (UID: \"05e9d108-9923-450e-b0cb-9cf6a5bc4a54\") " pod="openshift-marketplace/certified-operators-cgfsm" Sep 30 10:02:43 crc kubenswrapper[4730]: I0930 10:02:43.561106 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/05e9d108-9923-450e-b0cb-9cf6a5bc4a54-utilities\") pod \"certified-operators-cgfsm\" (UID: \"05e9d108-9923-450e-b0cb-9cf6a5bc4a54\") " pod="openshift-marketplace/certified-operators-cgfsm" Sep 30 10:02:43 crc kubenswrapper[4730]: I0930 10:02:43.561127 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rvstj\" (UniqueName: \"kubernetes.io/projected/05e9d108-9923-450e-b0cb-9cf6a5bc4a54-kube-api-access-rvstj\") pod \"certified-operators-cgfsm\" (UID: \"05e9d108-9923-450e-b0cb-9cf6a5bc4a54\") " pod="openshift-marketplace/certified-operators-cgfsm" Sep 30 10:02:43 crc kubenswrapper[4730]: I0930 10:02:43.662126 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/05e9d108-9923-450e-b0cb-9cf6a5bc4a54-utilities\") pod \"certified-operators-cgfsm\" (UID: \"05e9d108-9923-450e-b0cb-9cf6a5bc4a54\") " pod="openshift-marketplace/certified-operators-cgfsm" Sep 30 10:02:43 crc kubenswrapper[4730]: I0930 10:02:43.662171 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rvstj\" (UniqueName: \"kubernetes.io/projected/05e9d108-9923-450e-b0cb-9cf6a5bc4a54-kube-api-access-rvstj\") pod \"certified-operators-cgfsm\" (UID: \"05e9d108-9923-450e-b0cb-9cf6a5bc4a54\") " pod="openshift-marketplace/certified-operators-cgfsm" Sep 30 10:02:43 crc kubenswrapper[4730]: I0930 10:02:43.662217 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/05e9d108-9923-450e-b0cb-9cf6a5bc4a54-catalog-content\") pod \"certified-operators-cgfsm\" (UID: \"05e9d108-9923-450e-b0cb-9cf6a5bc4a54\") " pod="openshift-marketplace/certified-operators-cgfsm" Sep 30 10:02:43 crc kubenswrapper[4730]: I0930 10:02:43.662631 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/05e9d108-9923-450e-b0cb-9cf6a5bc4a54-catalog-content\") pod \"certified-operators-cgfsm\" (UID: \"05e9d108-9923-450e-b0cb-9cf6a5bc4a54\") " pod="openshift-marketplace/certified-operators-cgfsm" Sep 30 10:02:43 crc kubenswrapper[4730]: I0930 10:02:43.662849 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/05e9d108-9923-450e-b0cb-9cf6a5bc4a54-utilities\") pod \"certified-operators-cgfsm\" (UID: \"05e9d108-9923-450e-b0cb-9cf6a5bc4a54\") " pod="openshift-marketplace/certified-operators-cgfsm" Sep 30 10:02:43 crc kubenswrapper[4730]: I0930 10:02:43.686492 4730 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-rvstj\" (UniqueName: \"kubernetes.io/projected/05e9d108-9923-450e-b0cb-9cf6a5bc4a54-kube-api-access-rvstj\") pod \"certified-operators-cgfsm\" (UID: \"05e9d108-9923-450e-b0cb-9cf6a5bc4a54\") " pod="openshift-marketplace/certified-operators-cgfsm" Sep 30 10:02:43 crc kubenswrapper[4730]: I0930 10:02:43.727361 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-cgfsm" Sep 30 10:02:44 crc kubenswrapper[4730]: I0930 10:02:44.026974 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-cgfsm"] Sep 30 10:02:44 crc kubenswrapper[4730]: I0930 10:02:44.501329 4730 generic.go:334] "Generic (PLEG): container finished" podID="05e9d108-9923-450e-b0cb-9cf6a5bc4a54" containerID="3f44dcf1a0d6fd522e4b234f6291f95eb85a838316107bafb3c3a8da0eb15005" exitCode=0 Sep 30 10:02:44 crc kubenswrapper[4730]: I0930 10:02:44.501381 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cgfsm" event={"ID":"05e9d108-9923-450e-b0cb-9cf6a5bc4a54","Type":"ContainerDied","Data":"3f44dcf1a0d6fd522e4b234f6291f95eb85a838316107bafb3c3a8da0eb15005"} Sep 30 10:02:44 crc kubenswrapper[4730]: I0930 10:02:44.501421 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cgfsm" event={"ID":"05e9d108-9923-450e-b0cb-9cf6a5bc4a54","Type":"ContainerStarted","Data":"3ab7b0575d641b8f90c0a7fe93a0ff5fe7d29d9874eeb0559a2d98ecf9556ac1"} Sep 30 10:02:48 crc kubenswrapper[4730]: I0930 10:02:48.535832 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cgfsm" event={"ID":"05e9d108-9923-450e-b0cb-9cf6a5bc4a54","Type":"ContainerStarted","Data":"59af014563d2f4851a152f9f6375def1fba9ee40901e9fd3a8c3177f9ad0b898"} Sep 30 10:02:48 crc kubenswrapper[4730]: I0930 10:02:48.966027 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-bcn2n"] Sep 30 10:02:48 crc kubenswrapper[4730]: I0930 10:02:48.967861 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-bcn2n" Sep 30 10:02:48 crc kubenswrapper[4730]: I0930 10:02:48.984550 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-bcn2n"] Sep 30 10:02:49 crc kubenswrapper[4730]: I0930 10:02:49.047736 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2e760bac-01d0-49ab-9b19-fd35865e761b-catalog-content\") pod \"community-operators-bcn2n\" (UID: \"2e760bac-01d0-49ab-9b19-fd35865e761b\") " pod="openshift-marketplace/community-operators-bcn2n" Sep 30 10:02:49 crc kubenswrapper[4730]: I0930 10:02:49.047907 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2e760bac-01d0-49ab-9b19-fd35865e761b-utilities\") pod \"community-operators-bcn2n\" (UID: \"2e760bac-01d0-49ab-9b19-fd35865e761b\") " pod="openshift-marketplace/community-operators-bcn2n" Sep 30 10:02:49 crc kubenswrapper[4730]: I0930 10:02:49.047942 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tlrnl\" (UniqueName: \"kubernetes.io/projected/2e760bac-01d0-49ab-9b19-fd35865e761b-kube-api-access-tlrnl\") pod \"community-operators-bcn2n\" (UID: \"2e760bac-01d0-49ab-9b19-fd35865e761b\") " pod="openshift-marketplace/community-operators-bcn2n" Sep 30 10:02:49 crc kubenswrapper[4730]: I0930 10:02:49.149786 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2e760bac-01d0-49ab-9b19-fd35865e761b-utilities\") pod \"community-operators-bcn2n\" (UID: \"2e760bac-01d0-49ab-9b19-fd35865e761b\") " pod="openshift-marketplace/community-operators-bcn2n" Sep 30 10:02:49 crc kubenswrapper[4730]: I0930 10:02:49.149859 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tlrnl\" (UniqueName: \"kubernetes.io/projected/2e760bac-01d0-49ab-9b19-fd35865e761b-kube-api-access-tlrnl\") pod \"community-operators-bcn2n\" (UID: \"2e760bac-01d0-49ab-9b19-fd35865e761b\") " pod="openshift-marketplace/community-operators-bcn2n" Sep 30 10:02:49 crc kubenswrapper[4730]: I0930 10:02:49.149903 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2e760bac-01d0-49ab-9b19-fd35865e761b-catalog-content\") pod \"community-operators-bcn2n\" (UID: \"2e760bac-01d0-49ab-9b19-fd35865e761b\") " pod="openshift-marketplace/community-operators-bcn2n" Sep 30 10:02:49 crc kubenswrapper[4730]: I0930 10:02:49.150531 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2e760bac-01d0-49ab-9b19-fd35865e761b-catalog-content\") pod \"community-operators-bcn2n\" (UID: \"2e760bac-01d0-49ab-9b19-fd35865e761b\") " pod="openshift-marketplace/community-operators-bcn2n" Sep 30 10:02:49 crc kubenswrapper[4730]: I0930 10:02:49.150547 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2e760bac-01d0-49ab-9b19-fd35865e761b-utilities\") pod \"community-operators-bcn2n\" (UID: \"2e760bac-01d0-49ab-9b19-fd35865e761b\") " pod="openshift-marketplace/community-operators-bcn2n" Sep 30 10:02:49 crc kubenswrapper[4730]: I0930 10:02:49.175880 4730 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-tlrnl\" (UniqueName: \"kubernetes.io/projected/2e760bac-01d0-49ab-9b19-fd35865e761b-kube-api-access-tlrnl\") pod \"community-operators-bcn2n\" (UID: \"2e760bac-01d0-49ab-9b19-fd35865e761b\") " pod="openshift-marketplace/community-operators-bcn2n" Sep 30 10:02:49 crc kubenswrapper[4730]: I0930 10:02:49.283954 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-bcn2n" Sep 30 10:02:49 crc kubenswrapper[4730]: I0930 10:02:49.552834 4730 generic.go:334] "Generic (PLEG): container finished" podID="05e9d108-9923-450e-b0cb-9cf6a5bc4a54" containerID="59af014563d2f4851a152f9f6375def1fba9ee40901e9fd3a8c3177f9ad0b898" exitCode=0 Sep 30 10:02:49 crc kubenswrapper[4730]: I0930 10:02:49.552986 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cgfsm" event={"ID":"05e9d108-9923-450e-b0cb-9cf6a5bc4a54","Type":"ContainerDied","Data":"59af014563d2f4851a152f9f6375def1fba9ee40901e9fd3a8c3177f9ad0b898"} Sep 30 10:02:49 crc kubenswrapper[4730]: I0930 10:02:49.854549 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-bcn2n"] Sep 30 10:02:50 crc kubenswrapper[4730]: I0930 10:02:50.560231 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cgfsm" event={"ID":"05e9d108-9923-450e-b0cb-9cf6a5bc4a54","Type":"ContainerStarted","Data":"095b9e654232b717113b6d2129415c6275da1e1499218ae0eed0528f1251702a"} Sep 30 10:02:50 crc kubenswrapper[4730]: I0930 10:02:50.561818 4730 generic.go:334] "Generic (PLEG): container finished" podID="2e760bac-01d0-49ab-9b19-fd35865e761b" containerID="68426d8b857bb19d86447819e59dc42b349f7694a030d5ecd41b0d5684192516" exitCode=0 Sep 30 10:02:50 crc kubenswrapper[4730]: I0930 10:02:50.561853 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bcn2n" event={"ID":"2e760bac-01d0-49ab-9b19-fd35865e761b","Type":"ContainerDied","Data":"68426d8b857bb19d86447819e59dc42b349f7694a030d5ecd41b0d5684192516"} Sep 30 10:02:50 crc kubenswrapper[4730]: I0930 10:02:50.561871 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bcn2n" event={"ID":"2e760bac-01d0-49ab-9b19-fd35865e761b","Type":"ContainerStarted","Data":"c6fc95fc6e9a200157e4234e5023ecd2bf1f8eb161352cb1c09f8bd12be44f91"} Sep 30 10:02:50 crc kubenswrapper[4730]: I0930 10:02:50.579841 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-cgfsm" podStartSLOduration=2.140079654 podStartE2EDuration="7.579820223s" podCreationTimestamp="2025-09-30 10:02:43 +0000 UTC" firstStartedPulling="2025-09-30 10:02:44.502785988 +0000 UTC m=+808.836045981" lastFinishedPulling="2025-09-30 10:02:49.942526557 +0000 UTC m=+814.275786550" observedRunningTime="2025-09-30 10:02:50.576569421 +0000 UTC m=+814.909829414" watchObservedRunningTime="2025-09-30 10:02:50.579820223 +0000 UTC m=+814.913080216" Sep 30 10:02:51 crc kubenswrapper[4730]: I0930 10:02:51.570990 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bcn2n" event={"ID":"2e760bac-01d0-49ab-9b19-fd35865e761b","Type":"ContainerStarted","Data":"0404d03e3ff025ba9a67851c57381c46e7f2ee9d23a8f4bea7e82fc6dbdd478b"} Sep 30 10:02:52 crc kubenswrapper[4730]: I0930 10:02:52.577825 4730 generic.go:334] "Generic (PLEG): container finished" 
podID="2e760bac-01d0-49ab-9b19-fd35865e761b" containerID="0404d03e3ff025ba9a67851c57381c46e7f2ee9d23a8f4bea7e82fc6dbdd478b" exitCode=0 Sep 30 10:02:52 crc kubenswrapper[4730]: I0930 10:02:52.577880 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bcn2n" event={"ID":"2e760bac-01d0-49ab-9b19-fd35865e761b","Type":"ContainerDied","Data":"0404d03e3ff025ba9a67851c57381c46e7f2ee9d23a8f4bea7e82fc6dbdd478b"} Sep 30 10:02:53 crc kubenswrapper[4730]: I0930 10:02:53.728366 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-cgfsm" Sep 30 10:02:53 crc kubenswrapper[4730]: I0930 10:02:53.729378 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-cgfsm" Sep 30 10:02:53 crc kubenswrapper[4730]: I0930 10:02:53.778452 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-cgfsm" Sep 30 10:02:56 crc kubenswrapper[4730]: I0930 10:02:56.600660 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bcn2n" event={"ID":"2e760bac-01d0-49ab-9b19-fd35865e761b","Type":"ContainerStarted","Data":"a950602b7172fd4246d0be2de62e43578eeaa25aa8f570f19fb56e0198fd9bdc"} Sep 30 10:02:56 crc kubenswrapper[4730]: I0930 10:02:56.618349 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-bcn2n" podStartSLOduration=2.840246542 podStartE2EDuration="8.618332312s" podCreationTimestamp="2025-09-30 10:02:48 +0000 UTC" firstStartedPulling="2025-09-30 10:02:50.563146609 +0000 UTC m=+814.896406602" lastFinishedPulling="2025-09-30 10:02:56.341232379 +0000 UTC m=+820.674492372" observedRunningTime="2025-09-30 10:02:56.6175805 +0000 UTC m=+820.950840503" watchObservedRunningTime="2025-09-30 10:02:56.618332312 +0000 UTC m=+820.951592305" Sep 30 10:02:58 crc kubenswrapper[4730]: I0930 10:02:58.715832 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-574c858f4-cvlx7" Sep 30 10:02:59 crc kubenswrapper[4730]: I0930 10:02:59.284557 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-bcn2n" Sep 30 10:02:59 crc kubenswrapper[4730]: I0930 10:02:59.284668 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-bcn2n" Sep 30 10:02:59 crc kubenswrapper[4730]: I0930 10:02:59.356753 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-bcn2n" Sep 30 10:02:59 crc kubenswrapper[4730]: I0930 10:02:59.441513 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-rwcp7"] Sep 30 10:02:59 crc kubenswrapper[4730]: I0930 10:02:59.443804 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-rwcp7" Sep 30 10:02:59 crc kubenswrapper[4730]: I0930 10:02:59.445367 4730 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret" Sep 30 10:02:59 crc kubenswrapper[4730]: I0930 10:02:59.445625 4730 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-l7s6p" Sep 30 10:02:59 crc kubenswrapper[4730]: I0930 10:02:59.445779 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup" Sep 30 10:02:59 crc kubenswrapper[4730]: I0930 10:02:59.458871 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-5478bdb765-w96cg"] Sep 30 10:02:59 crc kubenswrapper[4730]: I0930 10:02:59.459673 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-w96cg" Sep 30 10:02:59 crc kubenswrapper[4730]: I0930 10:02:59.463178 4730 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert" Sep 30 10:02:59 crc kubenswrapper[4730]: I0930 10:02:59.472485 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-5478bdb765-w96cg"] Sep 30 10:02:59 crc kubenswrapper[4730]: I0930 10:02:59.542399 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-bgftk"] Sep 30 10:02:59 crc kubenswrapper[4730]: I0930 10:02:59.543461 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-bgftk" Sep 30 10:02:59 crc kubenswrapper[4730]: I0930 10:02:59.545255 4730 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-5rqlp" Sep 30 10:02:59 crc kubenswrapper[4730]: I0930 10:02:59.545431 4730 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist" Sep 30 10:02:59 crc kubenswrapper[4730]: I0930 10:02:59.545939 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2" Sep 30 10:02:59 crc kubenswrapper[4730]: I0930 10:02:59.546269 4730 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret" Sep 30 10:02:59 crc kubenswrapper[4730]: I0930 10:02:59.555260 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-5d688f5ffc-hp2xq"] Sep 30 10:02:59 crc kubenswrapper[4730]: I0930 10:02:59.560195 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-5d688f5ffc-hp2xq" Sep 30 10:02:59 crc kubenswrapper[4730]: I0930 10:02:59.562330 4730 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret" Sep 30 10:02:59 crc kubenswrapper[4730]: I0930 10:02:59.593435 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/c7e58e36-b0e5-4531-9e55-bf09a14d556e-cert\") pod \"frr-k8s-webhook-server-5478bdb765-w96cg\" (UID: \"c7e58e36-b0e5-4531-9e55-bf09a14d556e\") " pod="metallb-system/frr-k8s-webhook-server-5478bdb765-w96cg" Sep 30 10:02:59 crc kubenswrapper[4730]: I0930 10:02:59.593533 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9c4lv\" (UniqueName: \"kubernetes.io/projected/7718ff95-7f9a-46a9-a0f9-259e60a9f142-kube-api-access-9c4lv\") pod \"frr-k8s-rwcp7\" (UID: \"7718ff95-7f9a-46a9-a0f9-259e60a9f142\") " pod="metallb-system/frr-k8s-rwcp7" Sep 30 10:02:59 crc kubenswrapper[4730]: I0930 10:02:59.593579 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/7718ff95-7f9a-46a9-a0f9-259e60a9f142-frr-startup\") pod \"frr-k8s-rwcp7\" (UID: \"7718ff95-7f9a-46a9-a0f9-259e60a9f142\") " pod="metallb-system/frr-k8s-rwcp7" Sep 30 10:02:59 crc kubenswrapper[4730]: I0930 10:02:59.593641 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/7718ff95-7f9a-46a9-a0f9-259e60a9f142-metrics-certs\") pod \"frr-k8s-rwcp7\" (UID: \"7718ff95-7f9a-46a9-a0f9-259e60a9f142\") " pod="metallb-system/frr-k8s-rwcp7" Sep 30 10:02:59 crc kubenswrapper[4730]: I0930 10:02:59.593687 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n4d9v\" (UniqueName: \"kubernetes.io/projected/c7e58e36-b0e5-4531-9e55-bf09a14d556e-kube-api-access-n4d9v\") pod \"frr-k8s-webhook-server-5478bdb765-w96cg\" (UID: \"c7e58e36-b0e5-4531-9e55-bf09a14d556e\") " pod="metallb-system/frr-k8s-webhook-server-5478bdb765-w96cg" Sep 30 10:02:59 crc kubenswrapper[4730]: I0930 10:02:59.593716 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/7718ff95-7f9a-46a9-a0f9-259e60a9f142-metrics\") pod \"frr-k8s-rwcp7\" (UID: \"7718ff95-7f9a-46a9-a0f9-259e60a9f142\") " pod="metallb-system/frr-k8s-rwcp7" Sep 30 10:02:59 crc kubenswrapper[4730]: I0930 10:02:59.593740 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/7718ff95-7f9a-46a9-a0f9-259e60a9f142-frr-sockets\") pod \"frr-k8s-rwcp7\" (UID: \"7718ff95-7f9a-46a9-a0f9-259e60a9f142\") " pod="metallb-system/frr-k8s-rwcp7" Sep 30 10:02:59 crc kubenswrapper[4730]: I0930 10:02:59.593831 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/7718ff95-7f9a-46a9-a0f9-259e60a9f142-frr-conf\") pod \"frr-k8s-rwcp7\" (UID: \"7718ff95-7f9a-46a9-a0f9-259e60a9f142\") " pod="metallb-system/frr-k8s-rwcp7" Sep 30 10:02:59 crc kubenswrapper[4730]: I0930 10:02:59.593864 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" 
(UniqueName: \"kubernetes.io/empty-dir/7718ff95-7f9a-46a9-a0f9-259e60a9f142-reloader\") pod \"frr-k8s-rwcp7\" (UID: \"7718ff95-7f9a-46a9-a0f9-259e60a9f142\") " pod="metallb-system/frr-k8s-rwcp7" Sep 30 10:02:59 crc kubenswrapper[4730]: I0930 10:02:59.596644 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-5d688f5ffc-hp2xq"] Sep 30 10:02:59 crc kubenswrapper[4730]: I0930 10:02:59.695041 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n4d9v\" (UniqueName: \"kubernetes.io/projected/c7e58e36-b0e5-4531-9e55-bf09a14d556e-kube-api-access-n4d9v\") pod \"frr-k8s-webhook-server-5478bdb765-w96cg\" (UID: \"c7e58e36-b0e5-4531-9e55-bf09a14d556e\") " pod="metallb-system/frr-k8s-webhook-server-5478bdb765-w96cg" Sep 30 10:02:59 crc kubenswrapper[4730]: I0930 10:02:59.695091 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/7718ff95-7f9a-46a9-a0f9-259e60a9f142-metrics\") pod \"frr-k8s-rwcp7\" (UID: \"7718ff95-7f9a-46a9-a0f9-259e60a9f142\") " pod="metallb-system/frr-k8s-rwcp7" Sep 30 10:02:59 crc kubenswrapper[4730]: I0930 10:02:59.695115 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/7718ff95-7f9a-46a9-a0f9-259e60a9f142-frr-sockets\") pod \"frr-k8s-rwcp7\" (UID: \"7718ff95-7f9a-46a9-a0f9-259e60a9f142\") " pod="metallb-system/frr-k8s-rwcp7" Sep 30 10:02:59 crc kubenswrapper[4730]: I0930 10:02:59.695143 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rcbxk\" (UniqueName: \"kubernetes.io/projected/fac4c47c-e141-4fab-a69b-de3467d806ce-kube-api-access-rcbxk\") pod \"speaker-bgftk\" (UID: \"fac4c47c-e141-4fab-a69b-de3467d806ce\") " pod="metallb-system/speaker-bgftk" Sep 30 10:02:59 crc kubenswrapper[4730]: I0930 10:02:59.695166 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/7718ff95-7f9a-46a9-a0f9-259e60a9f142-frr-conf\") pod \"frr-k8s-rwcp7\" (UID: \"7718ff95-7f9a-46a9-a0f9-259e60a9f142\") " pod="metallb-system/frr-k8s-rwcp7" Sep 30 10:02:59 crc kubenswrapper[4730]: I0930 10:02:59.695190 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/fac4c47c-e141-4fab-a69b-de3467d806ce-metallb-excludel2\") pod \"speaker-bgftk\" (UID: \"fac4c47c-e141-4fab-a69b-de3467d806ce\") " pod="metallb-system/speaker-bgftk" Sep 30 10:02:59 crc kubenswrapper[4730]: I0930 10:02:59.695215 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/7718ff95-7f9a-46a9-a0f9-259e60a9f142-reloader\") pod \"frr-k8s-rwcp7\" (UID: \"7718ff95-7f9a-46a9-a0f9-259e60a9f142\") " pod="metallb-system/frr-k8s-rwcp7" Sep 30 10:02:59 crc kubenswrapper[4730]: I0930 10:02:59.695244 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8zckr\" (UniqueName: \"kubernetes.io/projected/b5cc3ceb-ad9c-4b2d-b272-913a1856afcc-kube-api-access-8zckr\") pod \"controller-5d688f5ffc-hp2xq\" (UID: \"b5cc3ceb-ad9c-4b2d-b272-913a1856afcc\") " pod="metallb-system/controller-5d688f5ffc-hp2xq" Sep 30 10:02:59 crc kubenswrapper[4730]: I0930 10:02:59.695265 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"cert\" (UniqueName: \"kubernetes.io/secret/c7e58e36-b0e5-4531-9e55-bf09a14d556e-cert\") pod \"frr-k8s-webhook-server-5478bdb765-w96cg\" (UID: \"c7e58e36-b0e5-4531-9e55-bf09a14d556e\") " pod="metallb-system/frr-k8s-webhook-server-5478bdb765-w96cg" Sep 30 10:02:59 crc kubenswrapper[4730]: I0930 10:02:59.695308 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9c4lv\" (UniqueName: \"kubernetes.io/projected/7718ff95-7f9a-46a9-a0f9-259e60a9f142-kube-api-access-9c4lv\") pod \"frr-k8s-rwcp7\" (UID: \"7718ff95-7f9a-46a9-a0f9-259e60a9f142\") " pod="metallb-system/frr-k8s-rwcp7" Sep 30 10:02:59 crc kubenswrapper[4730]: I0930 10:02:59.695328 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/fac4c47c-e141-4fab-a69b-de3467d806ce-metrics-certs\") pod \"speaker-bgftk\" (UID: \"fac4c47c-e141-4fab-a69b-de3467d806ce\") " pod="metallb-system/speaker-bgftk" Sep 30 10:02:59 crc kubenswrapper[4730]: I0930 10:02:59.695363 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/7718ff95-7f9a-46a9-a0f9-259e60a9f142-frr-startup\") pod \"frr-k8s-rwcp7\" (UID: \"7718ff95-7f9a-46a9-a0f9-259e60a9f142\") " pod="metallb-system/frr-k8s-rwcp7" Sep 30 10:02:59 crc kubenswrapper[4730]: I0930 10:02:59.695396 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/fac4c47c-e141-4fab-a69b-de3467d806ce-memberlist\") pod \"speaker-bgftk\" (UID: \"fac4c47c-e141-4fab-a69b-de3467d806ce\") " pod="metallb-system/speaker-bgftk" Sep 30 10:02:59 crc kubenswrapper[4730]: I0930 10:02:59.695419 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b5cc3ceb-ad9c-4b2d-b272-913a1856afcc-metrics-certs\") pod \"controller-5d688f5ffc-hp2xq\" (UID: \"b5cc3ceb-ad9c-4b2d-b272-913a1856afcc\") " pod="metallb-system/controller-5d688f5ffc-hp2xq" Sep 30 10:02:59 crc kubenswrapper[4730]: I0930 10:02:59.695441 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/7718ff95-7f9a-46a9-a0f9-259e60a9f142-metrics-certs\") pod \"frr-k8s-rwcp7\" (UID: \"7718ff95-7f9a-46a9-a0f9-259e60a9f142\") " pod="metallb-system/frr-k8s-rwcp7" Sep 30 10:02:59 crc kubenswrapper[4730]: I0930 10:02:59.695472 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/b5cc3ceb-ad9c-4b2d-b272-913a1856afcc-cert\") pod \"controller-5d688f5ffc-hp2xq\" (UID: \"b5cc3ceb-ad9c-4b2d-b272-913a1856afcc\") " pod="metallb-system/controller-5d688f5ffc-hp2xq" Sep 30 10:02:59 crc kubenswrapper[4730]: E0930 10:02:59.695577 4730 secret.go:188] Couldn't get secret metallb-system/frr-k8s-webhook-server-cert: secret "frr-k8s-webhook-server-cert" not found Sep 30 10:02:59 crc kubenswrapper[4730]: E0930 10:02:59.695640 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c7e58e36-b0e5-4531-9e55-bf09a14d556e-cert podName:c7e58e36-b0e5-4531-9e55-bf09a14d556e nodeName:}" failed. No retries permitted until 2025-09-30 10:03:00.195603938 +0000 UTC m=+824.528863941 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/c7e58e36-b0e5-4531-9e55-bf09a14d556e-cert") pod "frr-k8s-webhook-server-5478bdb765-w96cg" (UID: "c7e58e36-b0e5-4531-9e55-bf09a14d556e") : secret "frr-k8s-webhook-server-cert" not found Sep 30 10:02:59 crc kubenswrapper[4730]: I0930 10:02:59.695664 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/7718ff95-7f9a-46a9-a0f9-259e60a9f142-metrics\") pod \"frr-k8s-rwcp7\" (UID: \"7718ff95-7f9a-46a9-a0f9-259e60a9f142\") " pod="metallb-system/frr-k8s-rwcp7" Sep 30 10:02:59 crc kubenswrapper[4730]: E0930 10:02:59.695871 4730 secret.go:188] Couldn't get secret metallb-system/frr-k8s-certs-secret: secret "frr-k8s-certs-secret" not found Sep 30 10:02:59 crc kubenswrapper[4730]: E0930 10:02:59.695905 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7718ff95-7f9a-46a9-a0f9-259e60a9f142-metrics-certs podName:7718ff95-7f9a-46a9-a0f9-259e60a9f142 nodeName:}" failed. No retries permitted until 2025-09-30 10:03:00.195894957 +0000 UTC m=+824.529155020 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/7718ff95-7f9a-46a9-a0f9-259e60a9f142-metrics-certs") pod "frr-k8s-rwcp7" (UID: "7718ff95-7f9a-46a9-a0f9-259e60a9f142") : secret "frr-k8s-certs-secret" not found Sep 30 10:02:59 crc kubenswrapper[4730]: I0930 10:02:59.729021 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n4d9v\" (UniqueName: \"kubernetes.io/projected/c7e58e36-b0e5-4531-9e55-bf09a14d556e-kube-api-access-n4d9v\") pod \"frr-k8s-webhook-server-5478bdb765-w96cg\" (UID: \"c7e58e36-b0e5-4531-9e55-bf09a14d556e\") " pod="metallb-system/frr-k8s-webhook-server-5478bdb765-w96cg" Sep 30 10:02:59 crc kubenswrapper[4730]: I0930 10:02:59.747604 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/7718ff95-7f9a-46a9-a0f9-259e60a9f142-frr-sockets\") pod \"frr-k8s-rwcp7\" (UID: \"7718ff95-7f9a-46a9-a0f9-259e60a9f142\") " pod="metallb-system/frr-k8s-rwcp7" Sep 30 10:02:59 crc kubenswrapper[4730]: I0930 10:02:59.747855 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/7718ff95-7f9a-46a9-a0f9-259e60a9f142-frr-conf\") pod \"frr-k8s-rwcp7\" (UID: \"7718ff95-7f9a-46a9-a0f9-259e60a9f142\") " pod="metallb-system/frr-k8s-rwcp7" Sep 30 10:02:59 crc kubenswrapper[4730]: I0930 10:02:59.747785 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/7718ff95-7f9a-46a9-a0f9-259e60a9f142-reloader\") pod \"frr-k8s-rwcp7\" (UID: \"7718ff95-7f9a-46a9-a0f9-259e60a9f142\") " pod="metallb-system/frr-k8s-rwcp7" Sep 30 10:02:59 crc kubenswrapper[4730]: I0930 10:02:59.748527 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/7718ff95-7f9a-46a9-a0f9-259e60a9f142-frr-startup\") pod \"frr-k8s-rwcp7\" (UID: \"7718ff95-7f9a-46a9-a0f9-259e60a9f142\") " pod="metallb-system/frr-k8s-rwcp7" Sep 30 10:02:59 crc kubenswrapper[4730]: I0930 10:02:59.751621 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9c4lv\" (UniqueName: \"kubernetes.io/projected/7718ff95-7f9a-46a9-a0f9-259e60a9f142-kube-api-access-9c4lv\") pod \"frr-k8s-rwcp7\" (UID: 
\"7718ff95-7f9a-46a9-a0f9-259e60a9f142\") " pod="metallb-system/frr-k8s-rwcp7" Sep 30 10:02:59 crc kubenswrapper[4730]: I0930 10:02:59.796905 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/fac4c47c-e141-4fab-a69b-de3467d806ce-metrics-certs\") pod \"speaker-bgftk\" (UID: \"fac4c47c-e141-4fab-a69b-de3467d806ce\") " pod="metallb-system/speaker-bgftk" Sep 30 10:02:59 crc kubenswrapper[4730]: I0930 10:02:59.796977 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/fac4c47c-e141-4fab-a69b-de3467d806ce-memberlist\") pod \"speaker-bgftk\" (UID: \"fac4c47c-e141-4fab-a69b-de3467d806ce\") " pod="metallb-system/speaker-bgftk" Sep 30 10:02:59 crc kubenswrapper[4730]: I0930 10:02:59.796996 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b5cc3ceb-ad9c-4b2d-b272-913a1856afcc-metrics-certs\") pod \"controller-5d688f5ffc-hp2xq\" (UID: \"b5cc3ceb-ad9c-4b2d-b272-913a1856afcc\") " pod="metallb-system/controller-5d688f5ffc-hp2xq" Sep 30 10:02:59 crc kubenswrapper[4730]: I0930 10:02:59.797031 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/b5cc3ceb-ad9c-4b2d-b272-913a1856afcc-cert\") pod \"controller-5d688f5ffc-hp2xq\" (UID: \"b5cc3ceb-ad9c-4b2d-b272-913a1856afcc\") " pod="metallb-system/controller-5d688f5ffc-hp2xq" Sep 30 10:02:59 crc kubenswrapper[4730]: I0930 10:02:59.797062 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rcbxk\" (UniqueName: \"kubernetes.io/projected/fac4c47c-e141-4fab-a69b-de3467d806ce-kube-api-access-rcbxk\") pod \"speaker-bgftk\" (UID: \"fac4c47c-e141-4fab-a69b-de3467d806ce\") " pod="metallb-system/speaker-bgftk" Sep 30 10:02:59 crc kubenswrapper[4730]: I0930 10:02:59.797087 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/fac4c47c-e141-4fab-a69b-de3467d806ce-metallb-excludel2\") pod \"speaker-bgftk\" (UID: \"fac4c47c-e141-4fab-a69b-de3467d806ce\") " pod="metallb-system/speaker-bgftk" Sep 30 10:02:59 crc kubenswrapper[4730]: I0930 10:02:59.797120 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8zckr\" (UniqueName: \"kubernetes.io/projected/b5cc3ceb-ad9c-4b2d-b272-913a1856afcc-kube-api-access-8zckr\") pod \"controller-5d688f5ffc-hp2xq\" (UID: \"b5cc3ceb-ad9c-4b2d-b272-913a1856afcc\") " pod="metallb-system/controller-5d688f5ffc-hp2xq" Sep 30 10:02:59 crc kubenswrapper[4730]: E0930 10:02:59.798276 4730 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Sep 30 10:02:59 crc kubenswrapper[4730]: E0930 10:02:59.798335 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/fac4c47c-e141-4fab-a69b-de3467d806ce-memberlist podName:fac4c47c-e141-4fab-a69b-de3467d806ce nodeName:}" failed. No retries permitted until 2025-09-30 10:03:00.298316967 +0000 UTC m=+824.631576970 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/fac4c47c-e141-4fab-a69b-de3467d806ce-memberlist") pod "speaker-bgftk" (UID: "fac4c47c-e141-4fab-a69b-de3467d806ce") : secret "metallb-memberlist" not found Sep 30 10:02:59 crc kubenswrapper[4730]: I0930 10:02:59.799196 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/fac4c47c-e141-4fab-a69b-de3467d806ce-metallb-excludel2\") pod \"speaker-bgftk\" (UID: \"fac4c47c-e141-4fab-a69b-de3467d806ce\") " pod="metallb-system/speaker-bgftk" Sep 30 10:02:59 crc kubenswrapper[4730]: I0930 10:02:59.802716 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/b5cc3ceb-ad9c-4b2d-b272-913a1856afcc-cert\") pod \"controller-5d688f5ffc-hp2xq\" (UID: \"b5cc3ceb-ad9c-4b2d-b272-913a1856afcc\") " pod="metallb-system/controller-5d688f5ffc-hp2xq" Sep 30 10:02:59 crc kubenswrapper[4730]: I0930 10:02:59.804130 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/fac4c47c-e141-4fab-a69b-de3467d806ce-metrics-certs\") pod \"speaker-bgftk\" (UID: \"fac4c47c-e141-4fab-a69b-de3467d806ce\") " pod="metallb-system/speaker-bgftk" Sep 30 10:02:59 crc kubenswrapper[4730]: I0930 10:02:59.804162 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b5cc3ceb-ad9c-4b2d-b272-913a1856afcc-metrics-certs\") pod \"controller-5d688f5ffc-hp2xq\" (UID: \"b5cc3ceb-ad9c-4b2d-b272-913a1856afcc\") " pod="metallb-system/controller-5d688f5ffc-hp2xq" Sep 30 10:02:59 crc kubenswrapper[4730]: I0930 10:02:59.814461 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rcbxk\" (UniqueName: \"kubernetes.io/projected/fac4c47c-e141-4fab-a69b-de3467d806ce-kube-api-access-rcbxk\") pod \"speaker-bgftk\" (UID: \"fac4c47c-e141-4fab-a69b-de3467d806ce\") " pod="metallb-system/speaker-bgftk" Sep 30 10:02:59 crc kubenswrapper[4730]: I0930 10:02:59.814755 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8zckr\" (UniqueName: \"kubernetes.io/projected/b5cc3ceb-ad9c-4b2d-b272-913a1856afcc-kube-api-access-8zckr\") pod \"controller-5d688f5ffc-hp2xq\" (UID: \"b5cc3ceb-ad9c-4b2d-b272-913a1856afcc\") " pod="metallb-system/controller-5d688f5ffc-hp2xq" Sep 30 10:02:59 crc kubenswrapper[4730]: I0930 10:02:59.898305 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-5d688f5ffc-hp2xq" Sep 30 10:03:00 crc kubenswrapper[4730]: I0930 10:03:00.099080 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-5d688f5ffc-hp2xq"] Sep 30 10:03:00 crc kubenswrapper[4730]: W0930 10:03:00.103412 4730 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb5cc3ceb_ad9c_4b2d_b272_913a1856afcc.slice/crio-faa0a80586cd688b759ff782a92fdd4bcc86dab85cad332eefe33cba60e81dae WatchSource:0}: Error finding container faa0a80586cd688b759ff782a92fdd4bcc86dab85cad332eefe33cba60e81dae: Status 404 returned error can't find the container with id faa0a80586cd688b759ff782a92fdd4bcc86dab85cad332eefe33cba60e81dae Sep 30 10:03:00 crc kubenswrapper[4730]: I0930 10:03:00.207159 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/7718ff95-7f9a-46a9-a0f9-259e60a9f142-metrics-certs\") pod \"frr-k8s-rwcp7\" (UID: \"7718ff95-7f9a-46a9-a0f9-259e60a9f142\") " pod="metallb-system/frr-k8s-rwcp7" Sep 30 10:03:00 crc kubenswrapper[4730]: I0930 10:03:00.207573 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/c7e58e36-b0e5-4531-9e55-bf09a14d556e-cert\") pod \"frr-k8s-webhook-server-5478bdb765-w96cg\" (UID: \"c7e58e36-b0e5-4531-9e55-bf09a14d556e\") " pod="metallb-system/frr-k8s-webhook-server-5478bdb765-w96cg" Sep 30 10:03:00 crc kubenswrapper[4730]: I0930 10:03:00.210439 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/7718ff95-7f9a-46a9-a0f9-259e60a9f142-metrics-certs\") pod \"frr-k8s-rwcp7\" (UID: \"7718ff95-7f9a-46a9-a0f9-259e60a9f142\") " pod="metallb-system/frr-k8s-rwcp7" Sep 30 10:03:00 crc kubenswrapper[4730]: I0930 10:03:00.210988 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/c7e58e36-b0e5-4531-9e55-bf09a14d556e-cert\") pod \"frr-k8s-webhook-server-5478bdb765-w96cg\" (UID: \"c7e58e36-b0e5-4531-9e55-bf09a14d556e\") " pod="metallb-system/frr-k8s-webhook-server-5478bdb765-w96cg" Sep 30 10:03:00 crc kubenswrapper[4730]: I0930 10:03:00.309308 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/fac4c47c-e141-4fab-a69b-de3467d806ce-memberlist\") pod \"speaker-bgftk\" (UID: \"fac4c47c-e141-4fab-a69b-de3467d806ce\") " pod="metallb-system/speaker-bgftk" Sep 30 10:03:00 crc kubenswrapper[4730]: E0930 10:03:00.309591 4730 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Sep 30 10:03:00 crc kubenswrapper[4730]: E0930 10:03:00.309692 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/fac4c47c-e141-4fab-a69b-de3467d806ce-memberlist podName:fac4c47c-e141-4fab-a69b-de3467d806ce nodeName:}" failed. No retries permitted until 2025-09-30 10:03:01.309671415 +0000 UTC m=+825.642931408 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/fac4c47c-e141-4fab-a69b-de3467d806ce-memberlist") pod "speaker-bgftk" (UID: "fac4c47c-e141-4fab-a69b-de3467d806ce") : secret "metallb-memberlist" not found Sep 30 10:03:00 crc kubenswrapper[4730]: I0930 10:03:00.360814 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-rwcp7" Sep 30 10:03:00 crc kubenswrapper[4730]: I0930 10:03:00.374740 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-w96cg" Sep 30 10:03:00 crc kubenswrapper[4730]: I0930 10:03:00.629439 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-rwcp7" event={"ID":"7718ff95-7f9a-46a9-a0f9-259e60a9f142","Type":"ContainerStarted","Data":"c6743d7894bf911a9f6fac828f6f2a9f94cd18520dc0993827ad6235a1a70db6"} Sep 30 10:03:00 crc kubenswrapper[4730]: I0930 10:03:00.631234 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-5d688f5ffc-hp2xq" event={"ID":"b5cc3ceb-ad9c-4b2d-b272-913a1856afcc","Type":"ContainerStarted","Data":"0df1a13560330e019ec5f6f29ecc43d79b8c3504a9ebb50a0ab05f1f429ee890"} Sep 30 10:03:00 crc kubenswrapper[4730]: I0930 10:03:00.631306 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-5d688f5ffc-hp2xq" event={"ID":"b5cc3ceb-ad9c-4b2d-b272-913a1856afcc","Type":"ContainerStarted","Data":"fa77a17ed1d2325cb5e4fe5af2cefa9cc6e11f814e828b8edfcc4acb4901ccad"} Sep 30 10:03:00 crc kubenswrapper[4730]: I0930 10:03:00.631325 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-5d688f5ffc-hp2xq" event={"ID":"b5cc3ceb-ad9c-4b2d-b272-913a1856afcc","Type":"ContainerStarted","Data":"faa0a80586cd688b759ff782a92fdd4bcc86dab85cad332eefe33cba60e81dae"} Sep 30 10:03:00 crc kubenswrapper[4730]: I0930 10:03:00.631363 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-5d688f5ffc-hp2xq" Sep 30 10:03:00 crc kubenswrapper[4730]: I0930 10:03:00.756789 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-5d688f5ffc-hp2xq" podStartSLOduration=1.756772707 podStartE2EDuration="1.756772707s" podCreationTimestamp="2025-09-30 10:02:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 10:03:00.650652133 +0000 UTC m=+824.983912126" watchObservedRunningTime="2025-09-30 10:03:00.756772707 +0000 UTC m=+825.090032700" Sep 30 10:03:00 crc kubenswrapper[4730]: I0930 10:03:00.757936 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-5478bdb765-w96cg"] Sep 30 10:03:00 crc kubenswrapper[4730]: W0930 10:03:00.760142 4730 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc7e58e36_b0e5_4531_9e55_bf09a14d556e.slice/crio-8b65653c8d5f789891bda54df4e996ba76c2eb4e07e2992c6d0c778bfb7016ef WatchSource:0}: Error finding container 8b65653c8d5f789891bda54df4e996ba76c2eb4e07e2992c6d0c778bfb7016ef: Status 404 returned error can't find the container with id 8b65653c8d5f789891bda54df4e996ba76c2eb4e07e2992c6d0c778bfb7016ef Sep 30 10:03:01 crc kubenswrapper[4730]: I0930 10:03:01.323853 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/fac4c47c-e141-4fab-a69b-de3467d806ce-memberlist\") pod \"speaker-bgftk\" (UID: \"fac4c47c-e141-4fab-a69b-de3467d806ce\") " pod="metallb-system/speaker-bgftk" Sep 30 10:03:01 crc kubenswrapper[4730]: I0930 10:03:01.331279 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: 
\"kubernetes.io/secret/fac4c47c-e141-4fab-a69b-de3467d806ce-memberlist\") pod \"speaker-bgftk\" (UID: \"fac4c47c-e141-4fab-a69b-de3467d806ce\") " pod="metallb-system/speaker-bgftk" Sep 30 10:03:01 crc kubenswrapper[4730]: I0930 10:03:01.379836 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-bgftk" Sep 30 10:03:01 crc kubenswrapper[4730]: W0930 10:03:01.403092 4730 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfac4c47c_e141_4fab_a69b_de3467d806ce.slice/crio-98c24d67d0d62092a3877c69d354458cf46fc2415a92db45a685bbc398543ec9 WatchSource:0}: Error finding container 98c24d67d0d62092a3877c69d354458cf46fc2415a92db45a685bbc398543ec9: Status 404 returned error can't find the container with id 98c24d67d0d62092a3877c69d354458cf46fc2415a92db45a685bbc398543ec9 Sep 30 10:03:01 crc kubenswrapper[4730]: I0930 10:03:01.638795 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-bgftk" event={"ID":"fac4c47c-e141-4fab-a69b-de3467d806ce","Type":"ContainerStarted","Data":"b44bd8554550d9630562dc9d7a11406d60ad76bde29c868e571ac989a4a8722c"} Sep 30 10:03:01 crc kubenswrapper[4730]: I0930 10:03:01.639189 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-bgftk" event={"ID":"fac4c47c-e141-4fab-a69b-de3467d806ce","Type":"ContainerStarted","Data":"98c24d67d0d62092a3877c69d354458cf46fc2415a92db45a685bbc398543ec9"} Sep 30 10:03:01 crc kubenswrapper[4730]: I0930 10:03:01.640348 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-w96cg" event={"ID":"c7e58e36-b0e5-4531-9e55-bf09a14d556e","Type":"ContainerStarted","Data":"8b65653c8d5f789891bda54df4e996ba76c2eb4e07e2992c6d0c778bfb7016ef"} Sep 30 10:03:02 crc kubenswrapper[4730]: I0930 10:03:02.653951 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-bgftk" event={"ID":"fac4c47c-e141-4fab-a69b-de3467d806ce","Type":"ContainerStarted","Data":"bcc7d17659fb4183b9a9359d947b4644d6427588404f71ee378cd089b8f80c4a"} Sep 30 10:03:02 crc kubenswrapper[4730]: I0930 10:03:02.654142 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-bgftk" Sep 30 10:03:02 crc kubenswrapper[4730]: I0930 10:03:02.676454 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-bgftk" podStartSLOduration=3.676391914 podStartE2EDuration="3.676391914s" podCreationTimestamp="2025-09-30 10:02:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 10:03:02.671584388 +0000 UTC m=+827.004844381" watchObservedRunningTime="2025-09-30 10:03:02.676391914 +0000 UTC m=+827.009651907" Sep 30 10:03:03 crc kubenswrapper[4730]: I0930 10:03:03.839114 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-cgfsm" Sep 30 10:03:03 crc kubenswrapper[4730]: I0930 10:03:03.959603 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-cgfsm"] Sep 30 10:03:04 crc kubenswrapper[4730]: I0930 10:03:04.012046 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-jr5df"] Sep 30 10:03:04 crc kubenswrapper[4730]: I0930 10:03:04.012360 4730 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openshift-marketplace/certified-operators-jr5df" podUID="1b5268d8-ebc0-4df3-8be8-f0ed2ec66431" containerName="registry-server" containerID="cri-o://4a228e88b8dec09568b673ea4953b23b8547bb53e34c39e88a01451d16c00952" gracePeriod=2 Sep 30 10:03:04 crc kubenswrapper[4730]: I0930 10:03:04.495758 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-jr5df" Sep 30 10:03:04 crc kubenswrapper[4730]: I0930 10:03:04.696419 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jmxst\" (UniqueName: \"kubernetes.io/projected/1b5268d8-ebc0-4df3-8be8-f0ed2ec66431-kube-api-access-jmxst\") pod \"1b5268d8-ebc0-4df3-8be8-f0ed2ec66431\" (UID: \"1b5268d8-ebc0-4df3-8be8-f0ed2ec66431\") " Sep 30 10:03:04 crc kubenswrapper[4730]: I0930 10:03:04.696492 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1b5268d8-ebc0-4df3-8be8-f0ed2ec66431-catalog-content\") pod \"1b5268d8-ebc0-4df3-8be8-f0ed2ec66431\" (UID: \"1b5268d8-ebc0-4df3-8be8-f0ed2ec66431\") " Sep 30 10:03:04 crc kubenswrapper[4730]: I0930 10:03:04.696626 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1b5268d8-ebc0-4df3-8be8-f0ed2ec66431-utilities\") pod \"1b5268d8-ebc0-4df3-8be8-f0ed2ec66431\" (UID: \"1b5268d8-ebc0-4df3-8be8-f0ed2ec66431\") " Sep 30 10:03:04 crc kubenswrapper[4730]: I0930 10:03:04.699451 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1b5268d8-ebc0-4df3-8be8-f0ed2ec66431-utilities" (OuterVolumeSpecName: "utilities") pod "1b5268d8-ebc0-4df3-8be8-f0ed2ec66431" (UID: "1b5268d8-ebc0-4df3-8be8-f0ed2ec66431"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 10:03:04 crc kubenswrapper[4730]: I0930 10:03:04.717856 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1b5268d8-ebc0-4df3-8be8-f0ed2ec66431-kube-api-access-jmxst" (OuterVolumeSpecName: "kube-api-access-jmxst") pod "1b5268d8-ebc0-4df3-8be8-f0ed2ec66431" (UID: "1b5268d8-ebc0-4df3-8be8-f0ed2ec66431"). InnerVolumeSpecName "kube-api-access-jmxst". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:03:04 crc kubenswrapper[4730]: I0930 10:03:04.727354 4730 generic.go:334] "Generic (PLEG): container finished" podID="1b5268d8-ebc0-4df3-8be8-f0ed2ec66431" containerID="4a228e88b8dec09568b673ea4953b23b8547bb53e34c39e88a01451d16c00952" exitCode=0 Sep 30 10:03:04 crc kubenswrapper[4730]: I0930 10:03:04.727675 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jr5df" event={"ID":"1b5268d8-ebc0-4df3-8be8-f0ed2ec66431","Type":"ContainerDied","Data":"4a228e88b8dec09568b673ea4953b23b8547bb53e34c39e88a01451d16c00952"} Sep 30 10:03:04 crc kubenswrapper[4730]: I0930 10:03:04.727727 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-jr5df" Sep 30 10:03:04 crc kubenswrapper[4730]: I0930 10:03:04.727745 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jr5df" event={"ID":"1b5268d8-ebc0-4df3-8be8-f0ed2ec66431","Type":"ContainerDied","Data":"074622f8b918635cb3f6e90107962e235fe2fe3efab5e980ec02ff88e5b2fc5e"} Sep 30 10:03:04 crc kubenswrapper[4730]: I0930 10:03:04.727785 4730 scope.go:117] "RemoveContainer" containerID="4a228e88b8dec09568b673ea4953b23b8547bb53e34c39e88a01451d16c00952" Sep 30 10:03:04 crc kubenswrapper[4730]: I0930 10:03:04.751668 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1b5268d8-ebc0-4df3-8be8-f0ed2ec66431-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1b5268d8-ebc0-4df3-8be8-f0ed2ec66431" (UID: "1b5268d8-ebc0-4df3-8be8-f0ed2ec66431"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 10:03:04 crc kubenswrapper[4730]: I0930 10:03:04.758726 4730 scope.go:117] "RemoveContainer" containerID="e78b976a73a1c907cfad9f7c6ada91b1ec8ae5cfee49439908b7ff86d3a26289" Sep 30 10:03:04 crc kubenswrapper[4730]: I0930 10:03:04.786370 4730 scope.go:117] "RemoveContainer" containerID="474ea612077c4839713c0596b54c15e12b3025be1808e620255c5ffecb9a1657" Sep 30 10:03:04 crc kubenswrapper[4730]: I0930 10:03:04.799079 4730 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1b5268d8-ebc0-4df3-8be8-f0ed2ec66431-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 10:03:04 crc kubenswrapper[4730]: I0930 10:03:04.799120 4730 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1b5268d8-ebc0-4df3-8be8-f0ed2ec66431-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 10:03:04 crc kubenswrapper[4730]: I0930 10:03:04.799134 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jmxst\" (UniqueName: \"kubernetes.io/projected/1b5268d8-ebc0-4df3-8be8-f0ed2ec66431-kube-api-access-jmxst\") on node \"crc\" DevicePath \"\"" Sep 30 10:03:04 crc kubenswrapper[4730]: I0930 10:03:04.814774 4730 scope.go:117] "RemoveContainer" containerID="4a228e88b8dec09568b673ea4953b23b8547bb53e34c39e88a01451d16c00952" Sep 30 10:03:04 crc kubenswrapper[4730]: E0930 10:03:04.821546 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4a228e88b8dec09568b673ea4953b23b8547bb53e34c39e88a01451d16c00952\": container with ID starting with 4a228e88b8dec09568b673ea4953b23b8547bb53e34c39e88a01451d16c00952 not found: ID does not exist" containerID="4a228e88b8dec09568b673ea4953b23b8547bb53e34c39e88a01451d16c00952" Sep 30 10:03:04 crc kubenswrapper[4730]: I0930 10:03:04.821622 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4a228e88b8dec09568b673ea4953b23b8547bb53e34c39e88a01451d16c00952"} err="failed to get container status \"4a228e88b8dec09568b673ea4953b23b8547bb53e34c39e88a01451d16c00952\": rpc error: code = NotFound desc = could not find container \"4a228e88b8dec09568b673ea4953b23b8547bb53e34c39e88a01451d16c00952\": container with ID starting with 4a228e88b8dec09568b673ea4953b23b8547bb53e34c39e88a01451d16c00952 not found: ID does not exist" Sep 30 10:03:04 crc kubenswrapper[4730]: I0930 10:03:04.821658 4730 scope.go:117] "RemoveContainer" 
containerID="e78b976a73a1c907cfad9f7c6ada91b1ec8ae5cfee49439908b7ff86d3a26289" Sep 30 10:03:04 crc kubenswrapper[4730]: E0930 10:03:04.822189 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e78b976a73a1c907cfad9f7c6ada91b1ec8ae5cfee49439908b7ff86d3a26289\": container with ID starting with e78b976a73a1c907cfad9f7c6ada91b1ec8ae5cfee49439908b7ff86d3a26289 not found: ID does not exist" containerID="e78b976a73a1c907cfad9f7c6ada91b1ec8ae5cfee49439908b7ff86d3a26289" Sep 30 10:03:04 crc kubenswrapper[4730]: I0930 10:03:04.822234 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e78b976a73a1c907cfad9f7c6ada91b1ec8ae5cfee49439908b7ff86d3a26289"} err="failed to get container status \"e78b976a73a1c907cfad9f7c6ada91b1ec8ae5cfee49439908b7ff86d3a26289\": rpc error: code = NotFound desc = could not find container \"e78b976a73a1c907cfad9f7c6ada91b1ec8ae5cfee49439908b7ff86d3a26289\": container with ID starting with e78b976a73a1c907cfad9f7c6ada91b1ec8ae5cfee49439908b7ff86d3a26289 not found: ID does not exist" Sep 30 10:03:04 crc kubenswrapper[4730]: I0930 10:03:04.822263 4730 scope.go:117] "RemoveContainer" containerID="474ea612077c4839713c0596b54c15e12b3025be1808e620255c5ffecb9a1657" Sep 30 10:03:04 crc kubenswrapper[4730]: E0930 10:03:04.822553 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"474ea612077c4839713c0596b54c15e12b3025be1808e620255c5ffecb9a1657\": container with ID starting with 474ea612077c4839713c0596b54c15e12b3025be1808e620255c5ffecb9a1657 not found: ID does not exist" containerID="474ea612077c4839713c0596b54c15e12b3025be1808e620255c5ffecb9a1657" Sep 30 10:03:04 crc kubenswrapper[4730]: I0930 10:03:04.822580 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"474ea612077c4839713c0596b54c15e12b3025be1808e620255c5ffecb9a1657"} err="failed to get container status \"474ea612077c4839713c0596b54c15e12b3025be1808e620255c5ffecb9a1657\": rpc error: code = NotFound desc = could not find container \"474ea612077c4839713c0596b54c15e12b3025be1808e620255c5ffecb9a1657\": container with ID starting with 474ea612077c4839713c0596b54c15e12b3025be1808e620255c5ffecb9a1657 not found: ID does not exist" Sep 30 10:03:05 crc kubenswrapper[4730]: I0930 10:03:05.062674 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-jr5df"] Sep 30 10:03:05 crc kubenswrapper[4730]: I0930 10:03:05.066300 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-jr5df"] Sep 30 10:03:06 crc kubenswrapper[4730]: I0930 10:03:06.388319 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1b5268d8-ebc0-4df3-8be8-f0ed2ec66431" path="/var/lib/kubelet/pods/1b5268d8-ebc0-4df3-8be8-f0ed2ec66431/volumes" Sep 30 10:03:09 crc kubenswrapper[4730]: I0930 10:03:09.400593 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-bcn2n" Sep 30 10:03:09 crc kubenswrapper[4730]: I0930 10:03:09.456462 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-bcn2n"] Sep 30 10:03:09 crc kubenswrapper[4730]: I0930 10:03:09.769402 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-bcn2n" 
podUID="2e760bac-01d0-49ab-9b19-fd35865e761b" containerName="registry-server" containerID="cri-o://a950602b7172fd4246d0be2de62e43578eeaa25aa8f570f19fb56e0198fd9bdc" gracePeriod=2 Sep 30 10:03:10 crc kubenswrapper[4730]: I0930 10:03:10.408881 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-bcn2n" Sep 30 10:03:10 crc kubenswrapper[4730]: I0930 10:03:10.499464 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2e760bac-01d0-49ab-9b19-fd35865e761b-catalog-content\") pod \"2e760bac-01d0-49ab-9b19-fd35865e761b\" (UID: \"2e760bac-01d0-49ab-9b19-fd35865e761b\") " Sep 30 10:03:10 crc kubenswrapper[4730]: I0930 10:03:10.499714 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tlrnl\" (UniqueName: \"kubernetes.io/projected/2e760bac-01d0-49ab-9b19-fd35865e761b-kube-api-access-tlrnl\") pod \"2e760bac-01d0-49ab-9b19-fd35865e761b\" (UID: \"2e760bac-01d0-49ab-9b19-fd35865e761b\") " Sep 30 10:03:10 crc kubenswrapper[4730]: I0930 10:03:10.499763 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2e760bac-01d0-49ab-9b19-fd35865e761b-utilities\") pod \"2e760bac-01d0-49ab-9b19-fd35865e761b\" (UID: \"2e760bac-01d0-49ab-9b19-fd35865e761b\") " Sep 30 10:03:10 crc kubenswrapper[4730]: I0930 10:03:10.503591 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2e760bac-01d0-49ab-9b19-fd35865e761b-utilities" (OuterVolumeSpecName: "utilities") pod "2e760bac-01d0-49ab-9b19-fd35865e761b" (UID: "2e760bac-01d0-49ab-9b19-fd35865e761b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 10:03:10 crc kubenswrapper[4730]: I0930 10:03:10.508549 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2e760bac-01d0-49ab-9b19-fd35865e761b-kube-api-access-tlrnl" (OuterVolumeSpecName: "kube-api-access-tlrnl") pod "2e760bac-01d0-49ab-9b19-fd35865e761b" (UID: "2e760bac-01d0-49ab-9b19-fd35865e761b"). InnerVolumeSpecName "kube-api-access-tlrnl". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:03:10 crc kubenswrapper[4730]: I0930 10:03:10.548108 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2e760bac-01d0-49ab-9b19-fd35865e761b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2e760bac-01d0-49ab-9b19-fd35865e761b" (UID: "2e760bac-01d0-49ab-9b19-fd35865e761b"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 10:03:10 crc kubenswrapper[4730]: I0930 10:03:10.602182 4730 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2e760bac-01d0-49ab-9b19-fd35865e761b-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 10:03:10 crc kubenswrapper[4730]: I0930 10:03:10.602257 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tlrnl\" (UniqueName: \"kubernetes.io/projected/2e760bac-01d0-49ab-9b19-fd35865e761b-kube-api-access-tlrnl\") on node \"crc\" DevicePath \"\"" Sep 30 10:03:10 crc kubenswrapper[4730]: I0930 10:03:10.602280 4730 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2e760bac-01d0-49ab-9b19-fd35865e761b-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 10:03:10 crc kubenswrapper[4730]: I0930 10:03:10.779794 4730 generic.go:334] "Generic (PLEG): container finished" podID="7718ff95-7f9a-46a9-a0f9-259e60a9f142" containerID="15c5a00fe911137e52bb1db23be5e5f61796cf8555916b78a0c7776fbef65e0b" exitCode=0 Sep 30 10:03:10 crc kubenswrapper[4730]: I0930 10:03:10.779943 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-rwcp7" event={"ID":"7718ff95-7f9a-46a9-a0f9-259e60a9f142","Type":"ContainerDied","Data":"15c5a00fe911137e52bb1db23be5e5f61796cf8555916b78a0c7776fbef65e0b"} Sep 30 10:03:10 crc kubenswrapper[4730]: I0930 10:03:10.784689 4730 generic.go:334] "Generic (PLEG): container finished" podID="2e760bac-01d0-49ab-9b19-fd35865e761b" containerID="a950602b7172fd4246d0be2de62e43578eeaa25aa8f570f19fb56e0198fd9bdc" exitCode=0 Sep 30 10:03:10 crc kubenswrapper[4730]: I0930 10:03:10.784801 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bcn2n" event={"ID":"2e760bac-01d0-49ab-9b19-fd35865e761b","Type":"ContainerDied","Data":"a950602b7172fd4246d0be2de62e43578eeaa25aa8f570f19fb56e0198fd9bdc"} Sep 30 10:03:10 crc kubenswrapper[4730]: I0930 10:03:10.784842 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bcn2n" event={"ID":"2e760bac-01d0-49ab-9b19-fd35865e761b","Type":"ContainerDied","Data":"c6fc95fc6e9a200157e4234e5023ecd2bf1f8eb161352cb1c09f8bd12be44f91"} Sep 30 10:03:10 crc kubenswrapper[4730]: I0930 10:03:10.784873 4730 scope.go:117] "RemoveContainer" containerID="a950602b7172fd4246d0be2de62e43578eeaa25aa8f570f19fb56e0198fd9bdc" Sep 30 10:03:10 crc kubenswrapper[4730]: I0930 10:03:10.785083 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-bcn2n" Sep 30 10:03:10 crc kubenswrapper[4730]: I0930 10:03:10.787509 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-w96cg" event={"ID":"c7e58e36-b0e5-4531-9e55-bf09a14d556e","Type":"ContainerStarted","Data":"517f9467828ca9d31f55cb51e1aec13302c7a9370c3061e6e7925c33ca004f7c"} Sep 30 10:03:10 crc kubenswrapper[4730]: I0930 10:03:10.788128 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-w96cg" Sep 30 10:03:10 crc kubenswrapper[4730]: I0930 10:03:10.804543 4730 scope.go:117] "RemoveContainer" containerID="0404d03e3ff025ba9a67851c57381c46e7f2ee9d23a8f4bea7e82fc6dbdd478b" Sep 30 10:03:10 crc kubenswrapper[4730]: I0930 10:03:10.831283 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-bcn2n"] Sep 30 10:03:10 crc kubenswrapper[4730]: I0930 10:03:10.837376 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-bcn2n"] Sep 30 10:03:10 crc kubenswrapper[4730]: I0930 10:03:10.839264 4730 scope.go:117] "RemoveContainer" containerID="68426d8b857bb19d86447819e59dc42b349f7694a030d5ecd41b0d5684192516" Sep 30 10:03:10 crc kubenswrapper[4730]: I0930 10:03:10.846638 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-w96cg" podStartSLOduration=2.278877122 podStartE2EDuration="11.846597136s" podCreationTimestamp="2025-09-30 10:02:59 +0000 UTC" firstStartedPulling="2025-09-30 10:03:00.761677257 +0000 UTC m=+825.094937250" lastFinishedPulling="2025-09-30 10:03:10.329397271 +0000 UTC m=+834.662657264" observedRunningTime="2025-09-30 10:03:10.839286418 +0000 UTC m=+835.172546411" watchObservedRunningTime="2025-09-30 10:03:10.846597136 +0000 UTC m=+835.179857129" Sep 30 10:03:10 crc kubenswrapper[4730]: I0930 10:03:10.859469 4730 scope.go:117] "RemoveContainer" containerID="a950602b7172fd4246d0be2de62e43578eeaa25aa8f570f19fb56e0198fd9bdc" Sep 30 10:03:10 crc kubenswrapper[4730]: E0930 10:03:10.859929 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a950602b7172fd4246d0be2de62e43578eeaa25aa8f570f19fb56e0198fd9bdc\": container with ID starting with a950602b7172fd4246d0be2de62e43578eeaa25aa8f570f19fb56e0198fd9bdc not found: ID does not exist" containerID="a950602b7172fd4246d0be2de62e43578eeaa25aa8f570f19fb56e0198fd9bdc" Sep 30 10:03:10 crc kubenswrapper[4730]: I0930 10:03:10.859981 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a950602b7172fd4246d0be2de62e43578eeaa25aa8f570f19fb56e0198fd9bdc"} err="failed to get container status \"a950602b7172fd4246d0be2de62e43578eeaa25aa8f570f19fb56e0198fd9bdc\": rpc error: code = NotFound desc = could not find container \"a950602b7172fd4246d0be2de62e43578eeaa25aa8f570f19fb56e0198fd9bdc\": container with ID starting with a950602b7172fd4246d0be2de62e43578eeaa25aa8f570f19fb56e0198fd9bdc not found: ID does not exist" Sep 30 10:03:10 crc kubenswrapper[4730]: I0930 10:03:10.860010 4730 scope.go:117] "RemoveContainer" containerID="0404d03e3ff025ba9a67851c57381c46e7f2ee9d23a8f4bea7e82fc6dbdd478b" Sep 30 10:03:10 crc kubenswrapper[4730]: E0930 10:03:10.860473 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"0404d03e3ff025ba9a67851c57381c46e7f2ee9d23a8f4bea7e82fc6dbdd478b\": container with ID starting with 0404d03e3ff025ba9a67851c57381c46e7f2ee9d23a8f4bea7e82fc6dbdd478b not found: ID does not exist" containerID="0404d03e3ff025ba9a67851c57381c46e7f2ee9d23a8f4bea7e82fc6dbdd478b" Sep 30 10:03:10 crc kubenswrapper[4730]: I0930 10:03:10.860507 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0404d03e3ff025ba9a67851c57381c46e7f2ee9d23a8f4bea7e82fc6dbdd478b"} err="failed to get container status \"0404d03e3ff025ba9a67851c57381c46e7f2ee9d23a8f4bea7e82fc6dbdd478b\": rpc error: code = NotFound desc = could not find container \"0404d03e3ff025ba9a67851c57381c46e7f2ee9d23a8f4bea7e82fc6dbdd478b\": container with ID starting with 0404d03e3ff025ba9a67851c57381c46e7f2ee9d23a8f4bea7e82fc6dbdd478b not found: ID does not exist" Sep 30 10:03:10 crc kubenswrapper[4730]: I0930 10:03:10.860528 4730 scope.go:117] "RemoveContainer" containerID="68426d8b857bb19d86447819e59dc42b349f7694a030d5ecd41b0d5684192516" Sep 30 10:03:10 crc kubenswrapper[4730]: E0930 10:03:10.860996 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"68426d8b857bb19d86447819e59dc42b349f7694a030d5ecd41b0d5684192516\": container with ID starting with 68426d8b857bb19d86447819e59dc42b349f7694a030d5ecd41b0d5684192516 not found: ID does not exist" containerID="68426d8b857bb19d86447819e59dc42b349f7694a030d5ecd41b0d5684192516" Sep 30 10:03:10 crc kubenswrapper[4730]: I0930 10:03:10.861076 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"68426d8b857bb19d86447819e59dc42b349f7694a030d5ecd41b0d5684192516"} err="failed to get container status \"68426d8b857bb19d86447819e59dc42b349f7694a030d5ecd41b0d5684192516\": rpc error: code = NotFound desc = could not find container \"68426d8b857bb19d86447819e59dc42b349f7694a030d5ecd41b0d5684192516\": container with ID starting with 68426d8b857bb19d86447819e59dc42b349f7694a030d5ecd41b0d5684192516 not found: ID does not exist" Sep 30 10:03:11 crc kubenswrapper[4730]: I0930 10:03:11.384969 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-bgftk" Sep 30 10:03:11 crc kubenswrapper[4730]: I0930 10:03:11.795197 4730 generic.go:334] "Generic (PLEG): container finished" podID="7718ff95-7f9a-46a9-a0f9-259e60a9f142" containerID="949d987b1a135bfc63640f71c3b70c48cac3494950deaab9a1f6890ea0ff764c" exitCode=0 Sep 30 10:03:11 crc kubenswrapper[4730]: I0930 10:03:11.796229 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-rwcp7" event={"ID":"7718ff95-7f9a-46a9-a0f9-259e60a9f142","Type":"ContainerDied","Data":"949d987b1a135bfc63640f71c3b70c48cac3494950deaab9a1f6890ea0ff764c"} Sep 30 10:03:12 crc kubenswrapper[4730]: I0930 10:03:12.390681 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2e760bac-01d0-49ab-9b19-fd35865e761b" path="/var/lib/kubelet/pods/2e760bac-01d0-49ab-9b19-fd35865e761b/volumes" Sep 30 10:03:12 crc kubenswrapper[4730]: I0930 10:03:12.803185 4730 generic.go:334] "Generic (PLEG): container finished" podID="7718ff95-7f9a-46a9-a0f9-259e60a9f142" containerID="7c1cd9c7a4df702c52a14248371f1112be2ffbb22653b2b7722ef8c76ca08bd2" exitCode=0 Sep 30 10:03:12 crc kubenswrapper[4730]: I0930 10:03:12.803246 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-rwcp7" 
event={"ID":"7718ff95-7f9a-46a9-a0f9-259e60a9f142","Type":"ContainerDied","Data":"7c1cd9c7a4df702c52a14248371f1112be2ffbb22653b2b7722ef8c76ca08bd2"} Sep 30 10:03:13 crc kubenswrapper[4730]: I0930 10:03:13.815200 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-rwcp7" event={"ID":"7718ff95-7f9a-46a9-a0f9-259e60a9f142","Type":"ContainerStarted","Data":"6e6b05717fd7657d090a7d0101138b8a112c2d4680dc6cc507a2e6ccebc8ddb8"} Sep 30 10:03:13 crc kubenswrapper[4730]: I0930 10:03:13.815464 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-rwcp7" event={"ID":"7718ff95-7f9a-46a9-a0f9-259e60a9f142","Type":"ContainerStarted","Data":"ddf8137486f4e34c6470fd812936bd5d166f15b4d4162968c42bb24697ea2182"} Sep 30 10:03:13 crc kubenswrapper[4730]: I0930 10:03:13.815479 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-rwcp7" event={"ID":"7718ff95-7f9a-46a9-a0f9-259e60a9f142","Type":"ContainerStarted","Data":"0fd39e539d76aa7460d0ddb7cef49e189ee009f6067f30c203500f051861ff2f"} Sep 30 10:03:13 crc kubenswrapper[4730]: I0930 10:03:13.815489 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-rwcp7" event={"ID":"7718ff95-7f9a-46a9-a0f9-259e60a9f142","Type":"ContainerStarted","Data":"ceea85a2bb92f1d9c018cc3ecc8f93a32bf74eb8f75fd6210ec927d73d9066bf"} Sep 30 10:03:13 crc kubenswrapper[4730]: I0930 10:03:13.815501 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-rwcp7" event={"ID":"7718ff95-7f9a-46a9-a0f9-259e60a9f142","Type":"ContainerStarted","Data":"08a905ba5b5fa7dd24d8696c51664644417a945ba2557773853bfdb591bc06e0"} Sep 30 10:03:13 crc kubenswrapper[4730]: I0930 10:03:13.815512 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-rwcp7" event={"ID":"7718ff95-7f9a-46a9-a0f9-259e60a9f142","Type":"ContainerStarted","Data":"12aac56818c3e1b93918aae09194d67b2686382e7211ca82ca2d9dc3ea6a1e86"} Sep 30 10:03:13 crc kubenswrapper[4730]: I0930 10:03:13.816663 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-rwcp7" Sep 30 10:03:13 crc kubenswrapper[4730]: I0930 10:03:13.847089 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-rwcp7" podStartSLOduration=5.03024066 podStartE2EDuration="14.847070532s" podCreationTimestamp="2025-09-30 10:02:59 +0000 UTC" firstStartedPulling="2025-09-30 10:03:00.474350514 +0000 UTC m=+824.807610507" lastFinishedPulling="2025-09-30 10:03:10.291180386 +0000 UTC m=+834.624440379" observedRunningTime="2025-09-30 10:03:13.842909274 +0000 UTC m=+838.176169287" watchObservedRunningTime="2025-09-30 10:03:13.847070532 +0000 UTC m=+838.180330535" Sep 30 10:03:15 crc kubenswrapper[4730]: I0930 10:03:15.361150 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-rwcp7" Sep 30 10:03:15 crc kubenswrapper[4730]: I0930 10:03:15.398557 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-rwcp7" Sep 30 10:03:15 crc kubenswrapper[4730]: I0930 10:03:15.436340 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-4r5tx"] Sep 30 10:03:15 crc kubenswrapper[4730]: E0930 10:03:15.436652 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2e760bac-01d0-49ab-9b19-fd35865e761b" containerName="extract-content" Sep 30 10:03:15 crc kubenswrapper[4730]: I0930 10:03:15.436674 4730 
state_mem.go:107] "Deleted CPUSet assignment" podUID="2e760bac-01d0-49ab-9b19-fd35865e761b" containerName="extract-content" Sep 30 10:03:15 crc kubenswrapper[4730]: E0930 10:03:15.436692 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2e760bac-01d0-49ab-9b19-fd35865e761b" containerName="registry-server" Sep 30 10:03:15 crc kubenswrapper[4730]: I0930 10:03:15.436703 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="2e760bac-01d0-49ab-9b19-fd35865e761b" containerName="registry-server" Sep 30 10:03:15 crc kubenswrapper[4730]: E0930 10:03:15.436720 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1b5268d8-ebc0-4df3-8be8-f0ed2ec66431" containerName="extract-utilities" Sep 30 10:03:15 crc kubenswrapper[4730]: I0930 10:03:15.436727 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="1b5268d8-ebc0-4df3-8be8-f0ed2ec66431" containerName="extract-utilities" Sep 30 10:03:15 crc kubenswrapper[4730]: E0930 10:03:15.436741 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2e760bac-01d0-49ab-9b19-fd35865e761b" containerName="extract-utilities" Sep 30 10:03:15 crc kubenswrapper[4730]: I0930 10:03:15.436749 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="2e760bac-01d0-49ab-9b19-fd35865e761b" containerName="extract-utilities" Sep 30 10:03:15 crc kubenswrapper[4730]: E0930 10:03:15.436760 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1b5268d8-ebc0-4df3-8be8-f0ed2ec66431" containerName="registry-server" Sep 30 10:03:15 crc kubenswrapper[4730]: I0930 10:03:15.436768 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="1b5268d8-ebc0-4df3-8be8-f0ed2ec66431" containerName="registry-server" Sep 30 10:03:15 crc kubenswrapper[4730]: E0930 10:03:15.436781 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1b5268d8-ebc0-4df3-8be8-f0ed2ec66431" containerName="extract-content" Sep 30 10:03:15 crc kubenswrapper[4730]: I0930 10:03:15.436788 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="1b5268d8-ebc0-4df3-8be8-f0ed2ec66431" containerName="extract-content" Sep 30 10:03:15 crc kubenswrapper[4730]: I0930 10:03:15.436933 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="2e760bac-01d0-49ab-9b19-fd35865e761b" containerName="registry-server" Sep 30 10:03:15 crc kubenswrapper[4730]: I0930 10:03:15.436951 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="1b5268d8-ebc0-4df3-8be8-f0ed2ec66431" containerName="registry-server" Sep 30 10:03:15 crc kubenswrapper[4730]: I0930 10:03:15.437438 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-4r5tx" Sep 30 10:03:15 crc kubenswrapper[4730]: I0930 10:03:15.444042 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Sep 30 10:03:15 crc kubenswrapper[4730]: I0930 10:03:15.444535 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-index-dockercfg-tjvbs" Sep 30 10:03:15 crc kubenswrapper[4730]: I0930 10:03:15.444704 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Sep 30 10:03:15 crc kubenswrapper[4730]: I0930 10:03:15.457471 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-4r5tx"] Sep 30 10:03:15 crc kubenswrapper[4730]: I0930 10:03:15.462737 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sq75t\" (UniqueName: \"kubernetes.io/projected/9de9c5b0-58ea-4e1e-ac23-cb9d597f42b4-kube-api-access-sq75t\") pod \"openstack-operator-index-4r5tx\" (UID: \"9de9c5b0-58ea-4e1e-ac23-cb9d597f42b4\") " pod="openstack-operators/openstack-operator-index-4r5tx" Sep 30 10:03:15 crc kubenswrapper[4730]: I0930 10:03:15.563892 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sq75t\" (UniqueName: \"kubernetes.io/projected/9de9c5b0-58ea-4e1e-ac23-cb9d597f42b4-kube-api-access-sq75t\") pod \"openstack-operator-index-4r5tx\" (UID: \"9de9c5b0-58ea-4e1e-ac23-cb9d597f42b4\") " pod="openstack-operators/openstack-operator-index-4r5tx" Sep 30 10:03:15 crc kubenswrapper[4730]: I0930 10:03:15.581570 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sq75t\" (UniqueName: \"kubernetes.io/projected/9de9c5b0-58ea-4e1e-ac23-cb9d597f42b4-kube-api-access-sq75t\") pod \"openstack-operator-index-4r5tx\" (UID: \"9de9c5b0-58ea-4e1e-ac23-cb9d597f42b4\") " pod="openstack-operators/openstack-operator-index-4r5tx" Sep 30 10:03:15 crc kubenswrapper[4730]: I0930 10:03:15.761860 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-4r5tx" Sep 30 10:03:16 crc kubenswrapper[4730]: I0930 10:03:16.160084 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-4r5tx"] Sep 30 10:03:16 crc kubenswrapper[4730]: I0930 10:03:16.834274 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-4r5tx" event={"ID":"9de9c5b0-58ea-4e1e-ac23-cb9d597f42b4","Type":"ContainerStarted","Data":"ff258c63b4597508869cdbe1adff9bdfcea91387c2754a6449cb2e86ce6f2603"} Sep 30 10:03:19 crc kubenswrapper[4730]: I0930 10:03:19.830465 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-4r5tx"] Sep 30 10:03:19 crc kubenswrapper[4730]: I0930 10:03:19.857185 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-4r5tx" event={"ID":"9de9c5b0-58ea-4e1e-ac23-cb9d597f42b4","Type":"ContainerStarted","Data":"623543c40282aba8fe8a872bf9ecf73939760dd51ba23418cf442a2d4effd2c8"} Sep 30 10:03:19 crc kubenswrapper[4730]: I0930 10:03:19.879324 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-4r5tx" podStartSLOduration=1.769353838 podStartE2EDuration="4.879306566s" podCreationTimestamp="2025-09-30 10:03:15 +0000 UTC" firstStartedPulling="2025-09-30 10:03:16.168230277 +0000 UTC m=+840.501490270" lastFinishedPulling="2025-09-30 10:03:19.278183005 +0000 UTC m=+843.611442998" observedRunningTime="2025-09-30 10:03:19.871373461 +0000 UTC m=+844.204633454" watchObservedRunningTime="2025-09-30 10:03:19.879306566 +0000 UTC m=+844.212566549" Sep 30 10:03:19 crc kubenswrapper[4730]: I0930 10:03:19.902931 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-5d688f5ffc-hp2xq" Sep 30 10:03:20 crc kubenswrapper[4730]: I0930 10:03:20.378984 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-w96cg" Sep 30 10:03:20 crc kubenswrapper[4730]: I0930 10:03:20.439416 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-b5xnk"] Sep 30 10:03:20 crc kubenswrapper[4730]: I0930 10:03:20.440272 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-b5xnk" Sep 30 10:03:20 crc kubenswrapper[4730]: I0930 10:03:20.443852 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p2knm\" (UniqueName: \"kubernetes.io/projected/6c48593a-c4fc-49ae-a91b-4a5f57667d3f-kube-api-access-p2knm\") pod \"openstack-operator-index-b5xnk\" (UID: \"6c48593a-c4fc-49ae-a91b-4a5f57667d3f\") " pod="openstack-operators/openstack-operator-index-b5xnk" Sep 30 10:03:20 crc kubenswrapper[4730]: I0930 10:03:20.450271 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-b5xnk"] Sep 30 10:03:20 crc kubenswrapper[4730]: I0930 10:03:20.545058 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p2knm\" (UniqueName: \"kubernetes.io/projected/6c48593a-c4fc-49ae-a91b-4a5f57667d3f-kube-api-access-p2knm\") pod \"openstack-operator-index-b5xnk\" (UID: \"6c48593a-c4fc-49ae-a91b-4a5f57667d3f\") " pod="openstack-operators/openstack-operator-index-b5xnk" Sep 30 10:03:20 crc kubenswrapper[4730]: I0930 10:03:20.566842 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p2knm\" (UniqueName: \"kubernetes.io/projected/6c48593a-c4fc-49ae-a91b-4a5f57667d3f-kube-api-access-p2knm\") pod \"openstack-operator-index-b5xnk\" (UID: \"6c48593a-c4fc-49ae-a91b-4a5f57667d3f\") " pod="openstack-operators/openstack-operator-index-b5xnk" Sep 30 10:03:20 crc kubenswrapper[4730]: I0930 10:03:20.768954 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-b5xnk" Sep 30 10:03:20 crc kubenswrapper[4730]: I0930 10:03:20.863961 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/openstack-operator-index-4r5tx" podUID="9de9c5b0-58ea-4e1e-ac23-cb9d597f42b4" containerName="registry-server" containerID="cri-o://623543c40282aba8fe8a872bf9ecf73939760dd51ba23418cf442a2d4effd2c8" gracePeriod=2 Sep 30 10:03:21 crc kubenswrapper[4730]: I0930 10:03:21.199437 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-b5xnk"] Sep 30 10:03:21 crc kubenswrapper[4730]: W0930 10:03:21.203950 4730 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6c48593a_c4fc_49ae_a91b_4a5f57667d3f.slice/crio-f760a3da21c8ab954a1fb0425d326bfbc53bda57c7969a7b5c8af7b5134a1895 WatchSource:0}: Error finding container f760a3da21c8ab954a1fb0425d326bfbc53bda57c7969a7b5c8af7b5134a1895: Status 404 returned error can't find the container with id f760a3da21c8ab954a1fb0425d326bfbc53bda57c7969a7b5c8af7b5134a1895 Sep 30 10:03:21 crc kubenswrapper[4730]: I0930 10:03:21.264246 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-4r5tx" Sep 30 10:03:21 crc kubenswrapper[4730]: I0930 10:03:21.456979 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sq75t\" (UniqueName: \"kubernetes.io/projected/9de9c5b0-58ea-4e1e-ac23-cb9d597f42b4-kube-api-access-sq75t\") pod \"9de9c5b0-58ea-4e1e-ac23-cb9d597f42b4\" (UID: \"9de9c5b0-58ea-4e1e-ac23-cb9d597f42b4\") " Sep 30 10:03:21 crc kubenswrapper[4730]: I0930 10:03:21.461270 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9de9c5b0-58ea-4e1e-ac23-cb9d597f42b4-kube-api-access-sq75t" (OuterVolumeSpecName: "kube-api-access-sq75t") pod "9de9c5b0-58ea-4e1e-ac23-cb9d597f42b4" (UID: "9de9c5b0-58ea-4e1e-ac23-cb9d597f42b4"). InnerVolumeSpecName "kube-api-access-sq75t". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:03:21 crc kubenswrapper[4730]: I0930 10:03:21.558990 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sq75t\" (UniqueName: \"kubernetes.io/projected/9de9c5b0-58ea-4e1e-ac23-cb9d597f42b4-kube-api-access-sq75t\") on node \"crc\" DevicePath \"\"" Sep 30 10:03:21 crc kubenswrapper[4730]: I0930 10:03:21.871169 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-b5xnk" event={"ID":"6c48593a-c4fc-49ae-a91b-4a5f57667d3f","Type":"ContainerStarted","Data":"6a7a50c70bc1e5fba94b7ee56103adf03a79a80736360f0995f922e3e727bf79"} Sep 30 10:03:21 crc kubenswrapper[4730]: I0930 10:03:21.871227 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-b5xnk" event={"ID":"6c48593a-c4fc-49ae-a91b-4a5f57667d3f","Type":"ContainerStarted","Data":"f760a3da21c8ab954a1fb0425d326bfbc53bda57c7969a7b5c8af7b5134a1895"} Sep 30 10:03:21 crc kubenswrapper[4730]: I0930 10:03:21.873732 4730 generic.go:334] "Generic (PLEG): container finished" podID="9de9c5b0-58ea-4e1e-ac23-cb9d597f42b4" containerID="623543c40282aba8fe8a872bf9ecf73939760dd51ba23418cf442a2d4effd2c8" exitCode=0 Sep 30 10:03:21 crc kubenswrapper[4730]: I0930 10:03:21.873791 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-4r5tx" Sep 30 10:03:21 crc kubenswrapper[4730]: I0930 10:03:21.873782 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-4r5tx" event={"ID":"9de9c5b0-58ea-4e1e-ac23-cb9d597f42b4","Type":"ContainerDied","Data":"623543c40282aba8fe8a872bf9ecf73939760dd51ba23418cf442a2d4effd2c8"} Sep 30 10:03:21 crc kubenswrapper[4730]: I0930 10:03:21.873912 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-4r5tx" event={"ID":"9de9c5b0-58ea-4e1e-ac23-cb9d597f42b4","Type":"ContainerDied","Data":"ff258c63b4597508869cdbe1adff9bdfcea91387c2754a6449cb2e86ce6f2603"} Sep 30 10:03:21 crc kubenswrapper[4730]: I0930 10:03:21.873930 4730 scope.go:117] "RemoveContainer" containerID="623543c40282aba8fe8a872bf9ecf73939760dd51ba23418cf442a2d4effd2c8" Sep 30 10:03:21 crc kubenswrapper[4730]: I0930 10:03:21.894833 4730 scope.go:117] "RemoveContainer" containerID="623543c40282aba8fe8a872bf9ecf73939760dd51ba23418cf442a2d4effd2c8" Sep 30 10:03:21 crc kubenswrapper[4730]: E0930 10:03:21.897301 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"623543c40282aba8fe8a872bf9ecf73939760dd51ba23418cf442a2d4effd2c8\": container with ID starting with 623543c40282aba8fe8a872bf9ecf73939760dd51ba23418cf442a2d4effd2c8 not found: ID does not exist" containerID="623543c40282aba8fe8a872bf9ecf73939760dd51ba23418cf442a2d4effd2c8" Sep 30 10:03:21 crc kubenswrapper[4730]: I0930 10:03:21.897368 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"623543c40282aba8fe8a872bf9ecf73939760dd51ba23418cf442a2d4effd2c8"} err="failed to get container status \"623543c40282aba8fe8a872bf9ecf73939760dd51ba23418cf442a2d4effd2c8\": rpc error: code = NotFound desc = could not find container \"623543c40282aba8fe8a872bf9ecf73939760dd51ba23418cf442a2d4effd2c8\": container with ID starting with 623543c40282aba8fe8a872bf9ecf73939760dd51ba23418cf442a2d4effd2c8 not found: ID does not exist" Sep 30 10:03:21 crc kubenswrapper[4730]: I0930 10:03:21.901795 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-b5xnk" podStartSLOduration=1.850969557 podStartE2EDuration="1.901768345s" podCreationTimestamp="2025-09-30 10:03:20 +0000 UTC" firstStartedPulling="2025-09-30 10:03:21.207564759 +0000 UTC m=+845.540824752" lastFinishedPulling="2025-09-30 10:03:21.258363547 +0000 UTC m=+845.591623540" observedRunningTime="2025-09-30 10:03:21.895814223 +0000 UTC m=+846.229074236" watchObservedRunningTime="2025-09-30 10:03:21.901768345 +0000 UTC m=+846.235028358" Sep 30 10:03:21 crc kubenswrapper[4730]: I0930 10:03:21.914731 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-4r5tx"] Sep 30 10:03:21 crc kubenswrapper[4730]: I0930 10:03:21.919905 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/openstack-operator-index-4r5tx"] Sep 30 10:03:22 crc kubenswrapper[4730]: I0930 10:03:22.388204 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9de9c5b0-58ea-4e1e-ac23-cb9d597f42b4" path="/var/lib/kubelet/pods/9de9c5b0-58ea-4e1e-ac23-cb9d597f42b4/volumes" Sep 30 10:03:30 crc kubenswrapper[4730]: I0930 10:03:30.365157 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-rwcp7" 
Sep 30 10:03:30 crc kubenswrapper[4730]: I0930 10:03:30.769781 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-index-b5xnk" Sep 30 10:03:30 crc kubenswrapper[4730]: I0930 10:03:30.769862 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/openstack-operator-index-b5xnk" Sep 30 10:03:30 crc kubenswrapper[4730]: I0930 10:03:30.814947 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/openstack-operator-index-b5xnk" Sep 30 10:03:30 crc kubenswrapper[4730]: I0930 10:03:30.954699 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-index-b5xnk" Sep 30 10:03:36 crc kubenswrapper[4730]: I0930 10:03:36.951425 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/d6969747c50f23c1f614e69e237b778b2a9a353a3fb947849eaf84792f4r9zp"] Sep 30 10:03:36 crc kubenswrapper[4730]: E0930 10:03:36.952174 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9de9c5b0-58ea-4e1e-ac23-cb9d597f42b4" containerName="registry-server" Sep 30 10:03:36 crc kubenswrapper[4730]: I0930 10:03:36.952187 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="9de9c5b0-58ea-4e1e-ac23-cb9d597f42b4" containerName="registry-server" Sep 30 10:03:36 crc kubenswrapper[4730]: I0930 10:03:36.952302 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="9de9c5b0-58ea-4e1e-ac23-cb9d597f42b4" containerName="registry-server" Sep 30 10:03:36 crc kubenswrapper[4730]: I0930 10:03:36.954077 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/d6969747c50f23c1f614e69e237b778b2a9a353a3fb947849eaf84792f4r9zp" Sep 30 10:03:36 crc kubenswrapper[4730]: I0930 10:03:36.957871 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-dnmrg" Sep 30 10:03:36 crc kubenswrapper[4730]: I0930 10:03:36.958936 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/d6969747c50f23c1f614e69e237b778b2a9a353a3fb947849eaf84792f4r9zp"] Sep 30 10:03:37 crc kubenswrapper[4730]: I0930 10:03:37.070873 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e7753738-376e-4cbd-ad5b-42b0bc98a1a8-util\") pod \"d6969747c50f23c1f614e69e237b778b2a9a353a3fb947849eaf84792f4r9zp\" (UID: \"e7753738-376e-4cbd-ad5b-42b0bc98a1a8\") " pod="openstack-operators/d6969747c50f23c1f614e69e237b778b2a9a353a3fb947849eaf84792f4r9zp" Sep 30 10:03:37 crc kubenswrapper[4730]: I0930 10:03:37.070927 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e7753738-376e-4cbd-ad5b-42b0bc98a1a8-bundle\") pod \"d6969747c50f23c1f614e69e237b778b2a9a353a3fb947849eaf84792f4r9zp\" (UID: \"e7753738-376e-4cbd-ad5b-42b0bc98a1a8\") " pod="openstack-operators/d6969747c50f23c1f614e69e237b778b2a9a353a3fb947849eaf84792f4r9zp" Sep 30 10:03:37 crc kubenswrapper[4730]: I0930 10:03:37.070982 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2ntvl\" (UniqueName: \"kubernetes.io/projected/e7753738-376e-4cbd-ad5b-42b0bc98a1a8-kube-api-access-2ntvl\") pod \"d6969747c50f23c1f614e69e237b778b2a9a353a3fb947849eaf84792f4r9zp\" (UID: \"e7753738-376e-4cbd-ad5b-42b0bc98a1a8\") " 
pod="openstack-operators/d6969747c50f23c1f614e69e237b778b2a9a353a3fb947849eaf84792f4r9zp" Sep 30 10:03:37 crc kubenswrapper[4730]: I0930 10:03:37.172489 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e7753738-376e-4cbd-ad5b-42b0bc98a1a8-util\") pod \"d6969747c50f23c1f614e69e237b778b2a9a353a3fb947849eaf84792f4r9zp\" (UID: \"e7753738-376e-4cbd-ad5b-42b0bc98a1a8\") " pod="openstack-operators/d6969747c50f23c1f614e69e237b778b2a9a353a3fb947849eaf84792f4r9zp" Sep 30 10:03:37 crc kubenswrapper[4730]: I0930 10:03:37.172549 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e7753738-376e-4cbd-ad5b-42b0bc98a1a8-bundle\") pod \"d6969747c50f23c1f614e69e237b778b2a9a353a3fb947849eaf84792f4r9zp\" (UID: \"e7753738-376e-4cbd-ad5b-42b0bc98a1a8\") " pod="openstack-operators/d6969747c50f23c1f614e69e237b778b2a9a353a3fb947849eaf84792f4r9zp" Sep 30 10:03:37 crc kubenswrapper[4730]: I0930 10:03:37.172602 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2ntvl\" (UniqueName: \"kubernetes.io/projected/e7753738-376e-4cbd-ad5b-42b0bc98a1a8-kube-api-access-2ntvl\") pod \"d6969747c50f23c1f614e69e237b778b2a9a353a3fb947849eaf84792f4r9zp\" (UID: \"e7753738-376e-4cbd-ad5b-42b0bc98a1a8\") " pod="openstack-operators/d6969747c50f23c1f614e69e237b778b2a9a353a3fb947849eaf84792f4r9zp" Sep 30 10:03:37 crc kubenswrapper[4730]: I0930 10:03:37.173319 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e7753738-376e-4cbd-ad5b-42b0bc98a1a8-bundle\") pod \"d6969747c50f23c1f614e69e237b778b2a9a353a3fb947849eaf84792f4r9zp\" (UID: \"e7753738-376e-4cbd-ad5b-42b0bc98a1a8\") " pod="openstack-operators/d6969747c50f23c1f614e69e237b778b2a9a353a3fb947849eaf84792f4r9zp" Sep 30 10:03:37 crc kubenswrapper[4730]: I0930 10:03:37.173345 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e7753738-376e-4cbd-ad5b-42b0bc98a1a8-util\") pod \"d6969747c50f23c1f614e69e237b778b2a9a353a3fb947849eaf84792f4r9zp\" (UID: \"e7753738-376e-4cbd-ad5b-42b0bc98a1a8\") " pod="openstack-operators/d6969747c50f23c1f614e69e237b778b2a9a353a3fb947849eaf84792f4r9zp" Sep 30 10:03:37 crc kubenswrapper[4730]: I0930 10:03:37.190720 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2ntvl\" (UniqueName: \"kubernetes.io/projected/e7753738-376e-4cbd-ad5b-42b0bc98a1a8-kube-api-access-2ntvl\") pod \"d6969747c50f23c1f614e69e237b778b2a9a353a3fb947849eaf84792f4r9zp\" (UID: \"e7753738-376e-4cbd-ad5b-42b0bc98a1a8\") " pod="openstack-operators/d6969747c50f23c1f614e69e237b778b2a9a353a3fb947849eaf84792f4r9zp" Sep 30 10:03:37 crc kubenswrapper[4730]: I0930 10:03:37.305180 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/d6969747c50f23c1f614e69e237b778b2a9a353a3fb947849eaf84792f4r9zp" Sep 30 10:03:37 crc kubenswrapper[4730]: I0930 10:03:37.726353 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/d6969747c50f23c1f614e69e237b778b2a9a353a3fb947849eaf84792f4r9zp"] Sep 30 10:03:37 crc kubenswrapper[4730]: W0930 10:03:37.738100 4730 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode7753738_376e_4cbd_ad5b_42b0bc98a1a8.slice/crio-f22d5bb708bdb5bd744106db37fb25e6f252b053cce573b3797573005a30abfe WatchSource:0}: Error finding container f22d5bb708bdb5bd744106db37fb25e6f252b053cce573b3797573005a30abfe: Status 404 returned error can't find the container with id f22d5bb708bdb5bd744106db37fb25e6f252b053cce573b3797573005a30abfe Sep 30 10:03:37 crc kubenswrapper[4730]: I0930 10:03:37.985836 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/d6969747c50f23c1f614e69e237b778b2a9a353a3fb947849eaf84792f4r9zp" event={"ID":"e7753738-376e-4cbd-ad5b-42b0bc98a1a8","Type":"ContainerStarted","Data":"0fd02b81f97a50ec97201559a28a31577bab182e8b2a17dc662534b10f31ded1"} Sep 30 10:03:37 crc kubenswrapper[4730]: I0930 10:03:37.985895 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/d6969747c50f23c1f614e69e237b778b2a9a353a3fb947849eaf84792f4r9zp" event={"ID":"e7753738-376e-4cbd-ad5b-42b0bc98a1a8","Type":"ContainerStarted","Data":"f22d5bb708bdb5bd744106db37fb25e6f252b053cce573b3797573005a30abfe"} Sep 30 10:03:38 crc kubenswrapper[4730]: I0930 10:03:38.993233 4730 generic.go:334] "Generic (PLEG): container finished" podID="e7753738-376e-4cbd-ad5b-42b0bc98a1a8" containerID="0fd02b81f97a50ec97201559a28a31577bab182e8b2a17dc662534b10f31ded1" exitCode=0 Sep 30 10:03:38 crc kubenswrapper[4730]: I0930 10:03:38.993329 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/d6969747c50f23c1f614e69e237b778b2a9a353a3fb947849eaf84792f4r9zp" event={"ID":"e7753738-376e-4cbd-ad5b-42b0bc98a1a8","Type":"ContainerDied","Data":"0fd02b81f97a50ec97201559a28a31577bab182e8b2a17dc662534b10f31ded1"} Sep 30 10:03:40 crc kubenswrapper[4730]: I0930 10:03:40.004881 4730 generic.go:334] "Generic (PLEG): container finished" podID="e7753738-376e-4cbd-ad5b-42b0bc98a1a8" containerID="dfbec63956d0097e4e5e1fd44d6e85264c72d6ff539e21e84b5942005e67cefd" exitCode=0 Sep 30 10:03:40 crc kubenswrapper[4730]: I0930 10:03:40.004939 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/d6969747c50f23c1f614e69e237b778b2a9a353a3fb947849eaf84792f4r9zp" event={"ID":"e7753738-376e-4cbd-ad5b-42b0bc98a1a8","Type":"ContainerDied","Data":"dfbec63956d0097e4e5e1fd44d6e85264c72d6ff539e21e84b5942005e67cefd"} Sep 30 10:03:41 crc kubenswrapper[4730]: I0930 10:03:41.012607 4730 generic.go:334] "Generic (PLEG): container finished" podID="e7753738-376e-4cbd-ad5b-42b0bc98a1a8" containerID="4f5d55768080e87a1a9b0c5bfdfd2d82dfac3e9b31d761b35d2d89ad02da50da" exitCode=0 Sep 30 10:03:41 crc kubenswrapper[4730]: I0930 10:03:41.012724 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/d6969747c50f23c1f614e69e237b778b2a9a353a3fb947849eaf84792f4r9zp" event={"ID":"e7753738-376e-4cbd-ad5b-42b0bc98a1a8","Type":"ContainerDied","Data":"4f5d55768080e87a1a9b0c5bfdfd2d82dfac3e9b31d761b35d2d89ad02da50da"} Sep 30 10:03:42 crc kubenswrapper[4730]: I0930 10:03:42.301058 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/d6969747c50f23c1f614e69e237b778b2a9a353a3fb947849eaf84792f4r9zp" Sep 30 10:03:42 crc kubenswrapper[4730]: I0930 10:03:42.442003 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e7753738-376e-4cbd-ad5b-42b0bc98a1a8-util\") pod \"e7753738-376e-4cbd-ad5b-42b0bc98a1a8\" (UID: \"e7753738-376e-4cbd-ad5b-42b0bc98a1a8\") " Sep 30 10:03:42 crc kubenswrapper[4730]: I0930 10:03:42.442129 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2ntvl\" (UniqueName: \"kubernetes.io/projected/e7753738-376e-4cbd-ad5b-42b0bc98a1a8-kube-api-access-2ntvl\") pod \"e7753738-376e-4cbd-ad5b-42b0bc98a1a8\" (UID: \"e7753738-376e-4cbd-ad5b-42b0bc98a1a8\") " Sep 30 10:03:42 crc kubenswrapper[4730]: I0930 10:03:42.442348 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e7753738-376e-4cbd-ad5b-42b0bc98a1a8-bundle\") pod \"e7753738-376e-4cbd-ad5b-42b0bc98a1a8\" (UID: \"e7753738-376e-4cbd-ad5b-42b0bc98a1a8\") " Sep 30 10:03:42 crc kubenswrapper[4730]: I0930 10:03:42.443028 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e7753738-376e-4cbd-ad5b-42b0bc98a1a8-bundle" (OuterVolumeSpecName: "bundle") pod "e7753738-376e-4cbd-ad5b-42b0bc98a1a8" (UID: "e7753738-376e-4cbd-ad5b-42b0bc98a1a8"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 10:03:42 crc kubenswrapper[4730]: I0930 10:03:42.447086 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7753738-376e-4cbd-ad5b-42b0bc98a1a8-kube-api-access-2ntvl" (OuterVolumeSpecName: "kube-api-access-2ntvl") pod "e7753738-376e-4cbd-ad5b-42b0bc98a1a8" (UID: "e7753738-376e-4cbd-ad5b-42b0bc98a1a8"). InnerVolumeSpecName "kube-api-access-2ntvl". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:03:42 crc kubenswrapper[4730]: I0930 10:03:42.462922 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e7753738-376e-4cbd-ad5b-42b0bc98a1a8-util" (OuterVolumeSpecName: "util") pod "e7753738-376e-4cbd-ad5b-42b0bc98a1a8" (UID: "e7753738-376e-4cbd-ad5b-42b0bc98a1a8"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 10:03:42 crc kubenswrapper[4730]: I0930 10:03:42.543675 4730 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e7753738-376e-4cbd-ad5b-42b0bc98a1a8-util\") on node \"crc\" DevicePath \"\"" Sep 30 10:03:42 crc kubenswrapper[4730]: I0930 10:03:42.543724 4730 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e7753738-376e-4cbd-ad5b-42b0bc98a1a8-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 10:03:42 crc kubenswrapper[4730]: I0930 10:03:42.543739 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2ntvl\" (UniqueName: \"kubernetes.io/projected/e7753738-376e-4cbd-ad5b-42b0bc98a1a8-kube-api-access-2ntvl\") on node \"crc\" DevicePath \"\"" Sep 30 10:03:43 crc kubenswrapper[4730]: I0930 10:03:43.028367 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/d6969747c50f23c1f614e69e237b778b2a9a353a3fb947849eaf84792f4r9zp" event={"ID":"e7753738-376e-4cbd-ad5b-42b0bc98a1a8","Type":"ContainerDied","Data":"f22d5bb708bdb5bd744106db37fb25e6f252b053cce573b3797573005a30abfe"} Sep 30 10:03:43 crc kubenswrapper[4730]: I0930 10:03:43.028691 4730 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f22d5bb708bdb5bd744106db37fb25e6f252b053cce573b3797573005a30abfe" Sep 30 10:03:43 crc kubenswrapper[4730]: I0930 10:03:43.028458 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/d6969747c50f23c1f614e69e237b778b2a9a353a3fb947849eaf84792f4r9zp" Sep 30 10:03:49 crc kubenswrapper[4730]: I0930 10:03:49.596182 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-operator-f85b56ffc-k4dxb"] Sep 30 10:03:49 crc kubenswrapper[4730]: E0930 10:03:49.596434 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e7753738-376e-4cbd-ad5b-42b0bc98a1a8" containerName="util" Sep 30 10:03:49 crc kubenswrapper[4730]: I0930 10:03:49.596446 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="e7753738-376e-4cbd-ad5b-42b0bc98a1a8" containerName="util" Sep 30 10:03:49 crc kubenswrapper[4730]: E0930 10:03:49.596455 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e7753738-376e-4cbd-ad5b-42b0bc98a1a8" containerName="pull" Sep 30 10:03:49 crc kubenswrapper[4730]: I0930 10:03:49.596460 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="e7753738-376e-4cbd-ad5b-42b0bc98a1a8" containerName="pull" Sep 30 10:03:49 crc kubenswrapper[4730]: E0930 10:03:49.596470 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e7753738-376e-4cbd-ad5b-42b0bc98a1a8" containerName="extract" Sep 30 10:03:49 crc kubenswrapper[4730]: I0930 10:03:49.596476 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="e7753738-376e-4cbd-ad5b-42b0bc98a1a8" containerName="extract" Sep 30 10:03:49 crc kubenswrapper[4730]: I0930 10:03:49.596595 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="e7753738-376e-4cbd-ad5b-42b0bc98a1a8" containerName="extract" Sep 30 10:03:49 crc kubenswrapper[4730]: I0930 10:03:49.597300 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-f85b56ffc-k4dxb" Sep 30 10:03:49 crc kubenswrapper[4730]: I0930 10:03:49.600680 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-operator-dockercfg-cvnk5" Sep 30 10:03:49 crc kubenswrapper[4730]: I0930 10:03:49.654642 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jrwxs\" (UniqueName: \"kubernetes.io/projected/020b365e-9b85-4464-b95f-47e12c8812c5-kube-api-access-jrwxs\") pod \"openstack-operator-controller-operator-f85b56ffc-k4dxb\" (UID: \"020b365e-9b85-4464-b95f-47e12c8812c5\") " pod="openstack-operators/openstack-operator-controller-operator-f85b56ffc-k4dxb" Sep 30 10:03:49 crc kubenswrapper[4730]: I0930 10:03:49.677876 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-f85b56ffc-k4dxb"] Sep 30 10:03:49 crc kubenswrapper[4730]: I0930 10:03:49.755721 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jrwxs\" (UniqueName: \"kubernetes.io/projected/020b365e-9b85-4464-b95f-47e12c8812c5-kube-api-access-jrwxs\") pod \"openstack-operator-controller-operator-f85b56ffc-k4dxb\" (UID: \"020b365e-9b85-4464-b95f-47e12c8812c5\") " pod="openstack-operators/openstack-operator-controller-operator-f85b56ffc-k4dxb" Sep 30 10:03:49 crc kubenswrapper[4730]: I0930 10:03:49.775843 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jrwxs\" (UniqueName: \"kubernetes.io/projected/020b365e-9b85-4464-b95f-47e12c8812c5-kube-api-access-jrwxs\") pod \"openstack-operator-controller-operator-f85b56ffc-k4dxb\" (UID: \"020b365e-9b85-4464-b95f-47e12c8812c5\") " pod="openstack-operators/openstack-operator-controller-operator-f85b56ffc-k4dxb" Sep 30 10:03:49 crc kubenswrapper[4730]: I0930 10:03:49.918578 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-f85b56ffc-k4dxb" Sep 30 10:03:50 crc kubenswrapper[4730]: I0930 10:03:50.153674 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-f85b56ffc-k4dxb"] Sep 30 10:03:51 crc kubenswrapper[4730]: I0930 10:03:51.089131 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-f85b56ffc-k4dxb" event={"ID":"020b365e-9b85-4464-b95f-47e12c8812c5","Type":"ContainerStarted","Data":"35fbaddcf375fb8a91618db9094435f89fc3b8890002564a26628740788c369a"} Sep 30 10:03:55 crc kubenswrapper[4730]: I0930 10:03:55.131258 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-f85b56ffc-k4dxb" event={"ID":"020b365e-9b85-4464-b95f-47e12c8812c5","Type":"ContainerStarted","Data":"74b4e26bb3c695390c1f223331995b8d2fc7685017b90db47a33b48bbf80eaa8"} Sep 30 10:03:57 crc kubenswrapper[4730]: I0930 10:03:57.150467 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-f85b56ffc-k4dxb" event={"ID":"020b365e-9b85-4464-b95f-47e12c8812c5","Type":"ContainerStarted","Data":"9b8664e211af784eb392fdf07fecbcd34d5563ed4277cf82f8d5ad4f93ebdfe3"} Sep 30 10:03:57 crc kubenswrapper[4730]: I0930 10:03:57.151920 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-operator-f85b56ffc-k4dxb" Sep 30 10:03:57 crc kubenswrapper[4730]: I0930 10:03:57.184975 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-operator-f85b56ffc-k4dxb" podStartSLOduration=1.777379365 podStartE2EDuration="8.184958177s" podCreationTimestamp="2025-09-30 10:03:49 +0000 UTC" firstStartedPulling="2025-09-30 10:03:50.162150021 +0000 UTC m=+874.495410014" lastFinishedPulling="2025-09-30 10:03:56.569728833 +0000 UTC m=+880.902988826" observedRunningTime="2025-09-30 10:03:57.17954704 +0000 UTC m=+881.512807033" watchObservedRunningTime="2025-09-30 10:03:57.184958177 +0000 UTC m=+881.518218170" Sep 30 10:03:59 crc kubenswrapper[4730]: I0930 10:03:59.167754 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-operator-f85b56ffc-k4dxb" Sep 30 10:04:02 crc kubenswrapper[4730]: I0930 10:04:02.336838 4730 patch_prober.go:28] interesting pod/machine-config-daemon-d4zf9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 10:04:02 crc kubenswrapper[4730]: I0930 10:04:02.337249 4730 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 10:04:32 crc kubenswrapper[4730]: I0930 10:04:32.336578 4730 patch_prober.go:28] interesting pod/machine-config-daemon-d4zf9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 
10:04:32 crc kubenswrapper[4730]: I0930 10:04:32.337261 4730 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.205843 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/barbican-operator-controller-manager-6ff8b75857-nk8jc"] Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.206931 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-nk8jc" Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.208849 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-controller-manager-dockercfg-lnr4l" Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.209534 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/cinder-operator-controller-manager-644bddb6d8-qx68r"] Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.210469 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-qx68r" Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.214179 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"cinder-operator-controller-manager-dockercfg-9dhpr" Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.231464 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/designate-operator-controller-manager-84f4f7b77b-tglx9"] Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.237202 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-6ff8b75857-nk8jc"] Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.237381 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-tglx9"
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.241955 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"designate-operator-controller-manager-dockercfg-hqczh"
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.249499 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sm7nx\" (UniqueName: \"kubernetes.io/projected/04589829-1e63-438e-b6e8-bdaa6f5ebc19-kube-api-access-sm7nx\") pod \"cinder-operator-controller-manager-644bddb6d8-qx68r\" (UID: \"04589829-1e63-438e-b6e8-bdaa6f5ebc19\") " pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-qx68r"
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.249570 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hzmtx\" (UniqueName: \"kubernetes.io/projected/978da32e-9bbe-453d-ba3f-32a89f23550e-kube-api-access-hzmtx\") pod \"barbican-operator-controller-manager-6ff8b75857-nk8jc\" (UID: \"978da32e-9bbe-453d-ba3f-32a89f23550e\") " pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-nk8jc"
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.249913 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-84f4f7b77b-tglx9"]
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.255820 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/glance-operator-controller-manager-84958c4d49-jdfxz"]
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.256919 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-84958c4d49-jdfxz"
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.259071 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"glance-operator-controller-manager-dockercfg-c5ctr"
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.261318 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-644bddb6d8-qx68r"]
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.273220 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/heat-operator-controller-manager-5d889d78cf-p957g"]
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.275813 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-p957g"
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.281781 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"heat-operator-controller-manager-dockercfg-b8tpr"
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.286135 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-84958c4d49-jdfxz"]
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.300146 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-5d889d78cf-p957g"]
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.320368 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-7d857cc749-rxq94"]
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.321399 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-7d857cc749-rxq94"
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.328406 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-84jlh"
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.328754 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-webhook-server-cert"
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.339718 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/horizon-operator-controller-manager-9f4696d94-mwgsf"]
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.340725 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-mwgsf"
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.343368 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-dockercfg-w4xkq"
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.350521 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sm7nx\" (UniqueName: \"kubernetes.io/projected/04589829-1e63-438e-b6e8-bdaa6f5ebc19-kube-api-access-sm7nx\") pod \"cinder-operator-controller-manager-644bddb6d8-qx68r\" (UID: \"04589829-1e63-438e-b6e8-bdaa6f5ebc19\") " pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-qx68r"
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.350576 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/4201b1a7-e458-49b2-9536-91e6db49ea36-cert\") pod \"infra-operator-controller-manager-7d857cc749-rxq94\" (UID: \"4201b1a7-e458-49b2-9536-91e6db49ea36\") " pod="openstack-operators/infra-operator-controller-manager-7d857cc749-rxq94"
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.350605 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9p8fk\" (UniqueName: \"kubernetes.io/projected/4201b1a7-e458-49b2-9536-91e6db49ea36-kube-api-access-9p8fk\") pod \"infra-operator-controller-manager-7d857cc749-rxq94\" (UID: \"4201b1a7-e458-49b2-9536-91e6db49ea36\") " pod="openstack-operators/infra-operator-controller-manager-7d857cc749-rxq94"
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.350697 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tmjt4\" (UniqueName: \"kubernetes.io/projected/2758692b-990d-4330-9765-22614cd379a0-kube-api-access-tmjt4\") pod \"designate-operator-controller-manager-84f4f7b77b-tglx9\" (UID: \"2758692b-990d-4330-9765-22614cd379a0\") " pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-tglx9"
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.350735 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hzmtx\" (UniqueName: \"kubernetes.io/projected/978da32e-9bbe-453d-ba3f-32a89f23550e-kube-api-access-hzmtx\") pod \"barbican-operator-controller-manager-6ff8b75857-nk8jc\" (UID: \"978da32e-9bbe-453d-ba3f-32a89f23550e\") " pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-nk8jc"
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.350796 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cpnl6\" (UniqueName: \"kubernetes.io/projected/0f748696-3e59-4b53-a5d2-1dce4b0b6a3a-kube-api-access-cpnl6\") pod \"glance-operator-controller-manager-84958c4d49-jdfxz\" (UID: \"0f748696-3e59-4b53-a5d2-1dce4b0b6a3a\") " pod="openstack-operators/glance-operator-controller-manager-84958c4d49-jdfxz"
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.350852 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wvdf4\" (UniqueName: \"kubernetes.io/projected/419a8cdd-e50e-42f8-b913-61214be0a9a5-kube-api-access-wvdf4\") pod \"heat-operator-controller-manager-5d889d78cf-p957g\" (UID: \"419a8cdd-e50e-42f8-b913-61214be0a9a5\") " pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-p957g"
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.354695 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ironic-operator-controller-manager-7975b88857-72mvr"]
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.356040 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-7975b88857-72mvr"
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.358116 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ironic-operator-controller-manager-dockercfg-mqknx"
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.391910 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hzmtx\" (UniqueName: \"kubernetes.io/projected/978da32e-9bbe-453d-ba3f-32a89f23550e-kube-api-access-hzmtx\") pod \"barbican-operator-controller-manager-6ff8b75857-nk8jc\" (UID: \"978da32e-9bbe-453d-ba3f-32a89f23550e\") " pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-nk8jc"
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.392160 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-7d857cc749-rxq94"]
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.392835 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sm7nx\" (UniqueName: \"kubernetes.io/projected/04589829-1e63-438e-b6e8-bdaa6f5ebc19-kube-api-access-sm7nx\") pod \"cinder-operator-controller-manager-644bddb6d8-qx68r\" (UID: \"04589829-1e63-438e-b6e8-bdaa6f5ebc19\") " pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-qx68r"
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.397647 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-9f4696d94-mwgsf"]
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.425466 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-7975b88857-72mvr"]
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.439417 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-controller-manager-5bd55b4bff-qsgdr"]
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.440820 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-qsgdr"
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.443965 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-csj85"
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.450119 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/manila-operator-controller-manager-6d68dbc695-bgczv"]
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.451348 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-bgczv"
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.453015 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tmjt4\" (UniqueName: \"kubernetes.io/projected/2758692b-990d-4330-9765-22614cd379a0-kube-api-access-tmjt4\") pod \"designate-operator-controller-manager-84f4f7b77b-tglx9\" (UID: \"2758692b-990d-4330-9765-22614cd379a0\") " pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-tglx9"
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.453084 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-clwxx\" (UniqueName: \"kubernetes.io/projected/3cc5b4f8-09e7-44a7-aa40-f173ad8fb157-kube-api-access-clwxx\") pod \"ironic-operator-controller-manager-7975b88857-72mvr\" (UID: \"3cc5b4f8-09e7-44a7-aa40-f173ad8fb157\") " pod="openstack-operators/ironic-operator-controller-manager-7975b88857-72mvr"
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.453132 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z8xz6\" (UniqueName: \"kubernetes.io/projected/db4dd5c5-bcc5-4782-acf0-42d686edd287-kube-api-access-z8xz6\") pod \"horizon-operator-controller-manager-9f4696d94-mwgsf\" (UID: \"db4dd5c5-bcc5-4782-acf0-42d686edd287\") " pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-mwgsf"
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.453216 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cpnl6\" (UniqueName: \"kubernetes.io/projected/0f748696-3e59-4b53-a5d2-1dce4b0b6a3a-kube-api-access-cpnl6\") pod \"glance-operator-controller-manager-84958c4d49-jdfxz\" (UID: \"0f748696-3e59-4b53-a5d2-1dce4b0b6a3a\") " pod="openstack-operators/glance-operator-controller-manager-84958c4d49-jdfxz"
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.453259 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wvdf4\" (UniqueName: \"kubernetes.io/projected/419a8cdd-e50e-42f8-b913-61214be0a9a5-kube-api-access-wvdf4\") pod \"heat-operator-controller-manager-5d889d78cf-p957g\" (UID: \"419a8cdd-e50e-42f8-b913-61214be0a9a5\") " pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-p957g"
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.453310 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/4201b1a7-e458-49b2-9536-91e6db49ea36-cert\") pod \"infra-operator-controller-manager-7d857cc749-rxq94\" (UID: \"4201b1a7-e458-49b2-9536-91e6db49ea36\") " pod="openstack-operators/infra-operator-controller-manager-7d857cc749-rxq94"
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.453338 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9p8fk\" (UniqueName: \"kubernetes.io/projected/4201b1a7-e458-49b2-9536-91e6db49ea36-kube-api-access-9p8fk\") pod \"infra-operator-controller-manager-7d857cc749-rxq94\" (UID: \"4201b1a7-e458-49b2-9536-91e6db49ea36\") " pod="openstack-operators/infra-operator-controller-manager-7d857cc749-rxq94"
Sep 30 10:04:33 crc kubenswrapper[4730]: E0930 10:04:33.455176 4730 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found
Sep 30 10:04:33 crc kubenswrapper[4730]: E0930 10:04:33.455301 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4201b1a7-e458-49b2-9536-91e6db49ea36-cert podName:4201b1a7-e458-49b2-9536-91e6db49ea36 nodeName:}" failed. No retries permitted until 2025-09-30 10:04:33.955281005 +0000 UTC m=+918.288540998 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/4201b1a7-e458-49b2-9536-91e6db49ea36-cert") pod "infra-operator-controller-manager-7d857cc749-rxq94" (UID: "4201b1a7-e458-49b2-9536-91e6db49ea36") : secret "infra-operator-webhook-server-cert" not found
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.460608 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"manila-operator-controller-manager-dockercfg-6gx7q"
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.462483 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-5bd55b4bff-qsgdr"]
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.467650 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-88c7-pr4r7"]
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.480130 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-6d68dbc695-bgczv"]
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.480239 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-88c7-pr4r7"
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.483287 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9p8fk\" (UniqueName: \"kubernetes.io/projected/4201b1a7-e458-49b2-9536-91e6db49ea36-kube-api-access-9p8fk\") pod \"infra-operator-controller-manager-7d857cc749-rxq94\" (UID: \"4201b1a7-e458-49b2-9536-91e6db49ea36\") " pod="openstack-operators/infra-operator-controller-manager-7d857cc749-rxq94"
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.488055 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-6mm7z"
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.488218 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-88c7-pr4r7"]
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.488399 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wvdf4\" (UniqueName: \"kubernetes.io/projected/419a8cdd-e50e-42f8-b913-61214be0a9a5-kube-api-access-wvdf4\") pod \"heat-operator-controller-manager-5d889d78cf-p957g\" (UID: \"419a8cdd-e50e-42f8-b913-61214be0a9a5\") " pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-p957g"
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.501531 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cpnl6\" (UniqueName: \"kubernetes.io/projected/0f748696-3e59-4b53-a5d2-1dce4b0b6a3a-kube-api-access-cpnl6\") pod \"glance-operator-controller-manager-84958c4d49-jdfxz\" (UID: \"0f748696-3e59-4b53-a5d2-1dce4b0b6a3a\") " pod="openstack-operators/glance-operator-controller-manager-84958c4d49-jdfxz"
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.518917 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tmjt4\" (UniqueName: \"kubernetes.io/projected/2758692b-990d-4330-9765-22614cd379a0-kube-api-access-tmjt4\") pod \"designate-operator-controller-manager-84f4f7b77b-tglx9\" (UID: \"2758692b-990d-4330-9765-22614cd379a0\") " pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-tglx9"
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.526754 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-nk8jc"
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.542458 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-qx68r"
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.546214 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/neutron-operator-controller-manager-64d7b59854-6v77r"]
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.549887 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-64d7b59854-6v77r"
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.562789 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vxxnb\" (UniqueName: \"kubernetes.io/projected/81c8b722-d28f-42d4-8bc0-b82b9eb34500-kube-api-access-vxxnb\") pod \"mariadb-operator-controller-manager-88c7-pr4r7\" (UID: \"81c8b722-d28f-42d4-8bc0-b82b9eb34500\") " pod="openstack-operators/mariadb-operator-controller-manager-88c7-pr4r7"
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.562871 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7qb78\" (UniqueName: \"kubernetes.io/projected/c24a4e1a-10db-44f2-9de6-16f4081a5609-kube-api-access-7qb78\") pod \"keystone-operator-controller-manager-5bd55b4bff-qsgdr\" (UID: \"c24a4e1a-10db-44f2-9de6-16f4081a5609\") " pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-qsgdr"
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.562910 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-clwxx\" (UniqueName: \"kubernetes.io/projected/3cc5b4f8-09e7-44a7-aa40-f173ad8fb157-kube-api-access-clwxx\") pod \"ironic-operator-controller-manager-7975b88857-72mvr\" (UID: \"3cc5b4f8-09e7-44a7-aa40-f173ad8fb157\") " pod="openstack-operators/ironic-operator-controller-manager-7975b88857-72mvr"
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.562928 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z8xz6\" (UniqueName: \"kubernetes.io/projected/db4dd5c5-bcc5-4782-acf0-42d686edd287-kube-api-access-z8xz6\") pod \"horizon-operator-controller-manager-9f4696d94-mwgsf\" (UID: \"db4dd5c5-bcc5-4782-acf0-42d686edd287\") " pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-mwgsf"
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.562958 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fnnmg\" (UniqueName: \"kubernetes.io/projected/27424124-82bf-42fa-a77b-fdbd44f5c24b-kube-api-access-fnnmg\") pod \"manila-operator-controller-manager-6d68dbc695-bgczv\" (UID: \"27424124-82bf-42fa-a77b-fdbd44f5c24b\") " pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-bgczv"
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.564212 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-tglx9"
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.565157 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/nova-operator-controller-manager-c7c776c96-z5tb9"]
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.566143 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-c7c776c96-z5tb9"
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.573598 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-64d7b59854-6v77r"]
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.576029 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"nova-operator-controller-manager-dockercfg-cwbj8"
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.576115 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"neutron-operator-controller-manager-dockercfg-l7d5w"
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.582471 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-84958c4d49-jdfxz"
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.587492 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-c7c776c96-z5tb9"]
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.608006 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-g7zc5"]
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.609140 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-g7zc5"
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.610956 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"octavia-operator-controller-manager-dockercfg-z9pxg"
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.611380 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-p957g"
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.612230 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-clwxx\" (UniqueName: \"kubernetes.io/projected/3cc5b4f8-09e7-44a7-aa40-f173ad8fb157-kube-api-access-clwxx\") pod \"ironic-operator-controller-manager-7975b88857-72mvr\" (UID: \"3cc5b4f8-09e7-44a7-aa40-f173ad8fb157\") " pod="openstack-operators/ironic-operator-controller-manager-7975b88857-72mvr"
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.623191 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-g7zc5"]
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.653356 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z8xz6\" (UniqueName: \"kubernetes.io/projected/db4dd5c5-bcc5-4782-acf0-42d686edd287-kube-api-access-z8xz6\") pod \"horizon-operator-controller-manager-9f4696d94-mwgsf\" (UID: \"db4dd5c5-bcc5-4782-acf0-42d686edd287\") " pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-mwgsf"
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.665166 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fnnmg\" (UniqueName: \"kubernetes.io/projected/27424124-82bf-42fa-a77b-fdbd44f5c24b-kube-api-access-fnnmg\") pod \"manila-operator-controller-manager-6d68dbc695-bgczv\" (UID: \"27424124-82bf-42fa-a77b-fdbd44f5c24b\") " pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-bgczv"
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.665226 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jpkzs\" (UniqueName: \"kubernetes.io/projected/0f10a085-8ce3-407b-a2ec-b6fabc38bc9f-kube-api-access-jpkzs\") pod \"octavia-operator-controller-manager-76fcc6dc7c-g7zc5\" (UID: \"0f10a085-8ce3-407b-a2ec-b6fabc38bc9f\") " pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-g7zc5"
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.665286 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vxxnb\" (UniqueName: \"kubernetes.io/projected/81c8b722-d28f-42d4-8bc0-b82b9eb34500-kube-api-access-vxxnb\") pod \"mariadb-operator-controller-manager-88c7-pr4r7\" (UID: \"81c8b722-d28f-42d4-8bc0-b82b9eb34500\") " pod="openstack-operators/mariadb-operator-controller-manager-88c7-pr4r7"
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.665307 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7tlfh\" (UniqueName: \"kubernetes.io/projected/7ed8122e-042b-4574-9522-99557d55eedc-kube-api-access-7tlfh\") pod \"nova-operator-controller-manager-c7c776c96-z5tb9\" (UID: \"7ed8122e-042b-4574-9522-99557d55eedc\") " pod="openstack-operators/nova-operator-controller-manager-c7c776c96-z5tb9"
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.665326 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nqw6x\" (UniqueName: \"kubernetes.io/projected/8c6c5d61-fd4e-4b83-9b60-7681c6fc19f3-kube-api-access-nqw6x\") pod \"neutron-operator-controller-manager-64d7b59854-6v77r\" (UID: \"8c6c5d61-fd4e-4b83-9b60-7681c6fc19f3\") " pod="openstack-operators/neutron-operator-controller-manager-64d7b59854-6v77r"
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.665359 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7qb78\" (UniqueName: \"kubernetes.io/projected/c24a4e1a-10db-44f2-9de6-16f4081a5609-kube-api-access-7qb78\") pod \"keystone-operator-controller-manager-5bd55b4bff-qsgdr\" (UID: \"c24a4e1a-10db-44f2-9de6-16f4081a5609\") " pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-qsgdr"
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.680965 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-mwgsf"
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.717047 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fnnmg\" (UniqueName: \"kubernetes.io/projected/27424124-82bf-42fa-a77b-fdbd44f5c24b-kube-api-access-fnnmg\") pod \"manila-operator-controller-manager-6d68dbc695-bgczv\" (UID: \"27424124-82bf-42fa-a77b-fdbd44f5c24b\") " pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-bgczv"
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.717071 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vxxnb\" (UniqueName: \"kubernetes.io/projected/81c8b722-d28f-42d4-8bc0-b82b9eb34500-kube-api-access-vxxnb\") pod \"mariadb-operator-controller-manager-88c7-pr4r7\" (UID: \"81c8b722-d28f-42d4-8bc0-b82b9eb34500\") " pod="openstack-operators/mariadb-operator-controller-manager-88c7-pr4r7"
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.729370 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7qb78\" (UniqueName: \"kubernetes.io/projected/c24a4e1a-10db-44f2-9de6-16f4081a5609-kube-api-access-7qb78\") pod \"keystone-operator-controller-manager-5bd55b4bff-qsgdr\" (UID: \"c24a4e1a-10db-44f2-9de6-16f4081a5609\") " pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-qsgdr"
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.733674 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-b5jlg"]
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.734772 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-b5jlg"
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.749383 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-webhook-server-cert"
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.750115 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-bgh5r"
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.750482 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-7975b88857-72mvr"
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.766322 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qsg4t\" (UniqueName: \"kubernetes.io/projected/b6969510-2750-4466-b064-7cb67a4acf7e-kube-api-access-qsg4t\") pod \"openstack-baremetal-operator-controller-manager-6d776955-b5jlg\" (UID: \"b6969510-2750-4466-b064-7cb67a4acf7e\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-b5jlg"
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.766376 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jpkzs\" (UniqueName: \"kubernetes.io/projected/0f10a085-8ce3-407b-a2ec-b6fabc38bc9f-kube-api-access-jpkzs\") pod \"octavia-operator-controller-manager-76fcc6dc7c-g7zc5\" (UID: \"0f10a085-8ce3-407b-a2ec-b6fabc38bc9f\") " pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-g7zc5"
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.766443 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/b6969510-2750-4466-b064-7cb67a4acf7e-cert\") pod \"openstack-baremetal-operator-controller-manager-6d776955-b5jlg\" (UID: \"b6969510-2750-4466-b064-7cb67a4acf7e\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-b5jlg"
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.766463 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7tlfh\" (UniqueName: \"kubernetes.io/projected/7ed8122e-042b-4574-9522-99557d55eedc-kube-api-access-7tlfh\") pod \"nova-operator-controller-manager-c7c776c96-z5tb9\" (UID: \"7ed8122e-042b-4574-9522-99557d55eedc\") " pod="openstack-operators/nova-operator-controller-manager-c7c776c96-z5tb9"
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.766479 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nqw6x\" (UniqueName: \"kubernetes.io/projected/8c6c5d61-fd4e-4b83-9b60-7681c6fc19f3-kube-api-access-nqw6x\") pod \"neutron-operator-controller-manager-64d7b59854-6v77r\" (UID: \"8c6c5d61-fd4e-4b83-9b60-7681c6fc19f3\") " pod="openstack-operators/neutron-operator-controller-manager-64d7b59854-6v77r"
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.767123 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-qsgdr"
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.781856 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/placement-operator-controller-manager-589c58c6c-zbmbb"]
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.783171 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-589c58c6c-zbmbb"
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.784707 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nqw6x\" (UniqueName: \"kubernetes.io/projected/8c6c5d61-fd4e-4b83-9b60-7681c6fc19f3-kube-api-access-nqw6x\") pod \"neutron-operator-controller-manager-64d7b59854-6v77r\" (UID: \"8c6c5d61-fd4e-4b83-9b60-7681c6fc19f3\") " pod="openstack-operators/neutron-operator-controller-manager-64d7b59854-6v77r"
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.799170 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"placement-operator-controller-manager-dockercfg-ngmq8"
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.799423 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jpkzs\" (UniqueName: \"kubernetes.io/projected/0f10a085-8ce3-407b-a2ec-b6fabc38bc9f-kube-api-access-jpkzs\") pod \"octavia-operator-controller-manager-76fcc6dc7c-g7zc5\" (UID: \"0f10a085-8ce3-407b-a2ec-b6fabc38bc9f\") " pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-g7zc5"
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.803783 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7tlfh\" (UniqueName: \"kubernetes.io/projected/7ed8122e-042b-4574-9522-99557d55eedc-kube-api-access-7tlfh\") pod \"nova-operator-controller-manager-c7c776c96-z5tb9\" (UID: \"7ed8122e-042b-4574-9522-99557d55eedc\") " pod="openstack-operators/nova-operator-controller-manager-c7c776c96-z5tb9"
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.839108 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ovn-operator-controller-manager-9976ff44c-tsgfd"]
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.840135 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-b5jlg"]
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.840238 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-tsgfd"
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.843085 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ovn-operator-controller-manager-dockercfg-hgmpb"
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.869484 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-bgczv"
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.871122 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qsg4t\" (UniqueName: \"kubernetes.io/projected/b6969510-2750-4466-b064-7cb67a4acf7e-kube-api-access-qsg4t\") pod \"openstack-baremetal-operator-controller-manager-6d776955-b5jlg\" (UID: \"b6969510-2750-4466-b064-7cb67a4acf7e\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-b5jlg"
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.871184 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p9nct\" (UniqueName: \"kubernetes.io/projected/f4fe55ab-8eac-4f9a-9f5f-c70a91fd261e-kube-api-access-p9nct\") pod \"ovn-operator-controller-manager-9976ff44c-tsgfd\" (UID: \"f4fe55ab-8eac-4f9a-9f5f-c70a91fd261e\") " pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-tsgfd"
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.871223 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t7snm\" (UniqueName: \"kubernetes.io/projected/28fcf9ea-7f63-4add-bb31-99af57fcce2c-kube-api-access-t7snm\") pod \"placement-operator-controller-manager-589c58c6c-zbmbb\" (UID: \"28fcf9ea-7f63-4add-bb31-99af57fcce2c\") " pod="openstack-operators/placement-operator-controller-manager-589c58c6c-zbmbb"
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.871246 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/b6969510-2750-4466-b064-7cb67a4acf7e-cert\") pod \"openstack-baremetal-operator-controller-manager-6d776955-b5jlg\" (UID: \"b6969510-2750-4466-b064-7cb67a4acf7e\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-b5jlg"
Sep 30 10:04:33 crc kubenswrapper[4730]: E0930 10:04:33.871376 4730 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found
Sep 30 10:04:33 crc kubenswrapper[4730]: E0930 10:04:33.872862 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b6969510-2750-4466-b064-7cb67a4acf7e-cert podName:b6969510-2750-4466-b064-7cb67a4acf7e nodeName:}" failed. No retries permitted until 2025-09-30 10:04:34.372842876 +0000 UTC m=+918.706102879 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/b6969510-2750-4466-b064-7cb67a4acf7e-cert") pod "openstack-baremetal-operator-controller-manager-6d776955-b5jlg" (UID: "b6969510-2750-4466-b064-7cb67a4acf7e") : secret "openstack-baremetal-operator-webhook-server-cert" not found
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.873837 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-9976ff44c-tsgfd"]
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.877963 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-controller-manager-bc7dc7bd9-dsvl6"]
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.879095 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-dsvl6"
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.885761 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-88c7-pr4r7"
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.886669 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-dockercfg-zd99r"
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.891998 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qsg4t\" (UniqueName: \"kubernetes.io/projected/b6969510-2750-4466-b064-7cb67a4acf7e-kube-api-access-qsg4t\") pod \"openstack-baremetal-operator-controller-manager-6d776955-b5jlg\" (UID: \"b6969510-2750-4466-b064-7cb67a4acf7e\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-b5jlg"
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.901687 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-589c58c6c-zbmbb"]
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.910404 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-64d7b59854-6v77r"
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.911163 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-bc7dc7bd9-dsvl6"]
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.927940 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-nwvfh"]
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.929090 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-nwvfh"
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.932808 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-c7c776c96-z5tb9"
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.948425 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"telemetry-operator-controller-manager-dockercfg-6jbsp"
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.954566 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-nwvfh"]
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.972834 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p9nct\" (UniqueName: \"kubernetes.io/projected/f4fe55ab-8eac-4f9a-9f5f-c70a91fd261e-kube-api-access-p9nct\") pod \"ovn-operator-controller-manager-9976ff44c-tsgfd\" (UID: \"f4fe55ab-8eac-4f9a-9f5f-c70a91fd261e\") " pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-tsgfd"
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.972894 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/4201b1a7-e458-49b2-9536-91e6db49ea36-cert\") pod \"infra-operator-controller-manager-7d857cc749-rxq94\" (UID: \"4201b1a7-e458-49b2-9536-91e6db49ea36\") " pod="openstack-operators/infra-operator-controller-manager-7d857cc749-rxq94"
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.987647 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t7snm\" (UniqueName: \"kubernetes.io/projected/28fcf9ea-7f63-4add-bb31-99af57fcce2c-kube-api-access-t7snm\") pod \"placement-operator-controller-manager-589c58c6c-zbmbb\" (UID: \"28fcf9ea-7f63-4add-bb31-99af57fcce2c\") " pod="openstack-operators/placement-operator-controller-manager-589c58c6c-zbmbb"
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.987768 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mszsm\" (UniqueName: \"kubernetes.io/projected/a081f8cc-4fb8-457c-84de-2c7ba2c84821-kube-api-access-mszsm\") pod \"swift-operator-controller-manager-bc7dc7bd9-dsvl6\" (UID: \"a081f8cc-4fb8-457c-84de-2c7ba2c84821\") " pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-dsvl6"
Sep 30 10:04:33 crc kubenswrapper[4730]: I0930 10:04:33.987835 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fxpxl\" (UniqueName: \"kubernetes.io/projected/e6074e1b-4192-43a7-b391-f4112d2486bf-kube-api-access-fxpxl\") pod \"telemetry-operator-controller-manager-b8d54b5d7-nwvfh\" (UID: \"e6074e1b-4192-43a7-b391-f4112d2486bf\") " pod="openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-nwvfh"
Sep 30 10:04:34 crc kubenswrapper[4730]: I0930 10:04:34.000096 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/4201b1a7-e458-49b2-9536-91e6db49ea36-cert\") pod \"infra-operator-controller-manager-7d857cc749-rxq94\" (UID: \"4201b1a7-e458-49b2-9536-91e6db49ea36\") " pod="openstack-operators/infra-operator-controller-manager-7d857cc749-rxq94"
Sep 30 10:04:34 crc kubenswrapper[4730]: I0930 10:04:34.015368 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/test-operator-controller-manager-f66b554c6-djzc5"]
Sep 30 10:04:34 crc kubenswrapper[4730]: I0930 10:04:34.023826 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t7snm\" (UniqueName: \"kubernetes.io/projected/28fcf9ea-7f63-4add-bb31-99af57fcce2c-kube-api-access-t7snm\") pod \"placement-operator-controller-manager-589c58c6c-zbmbb\" (UID: \"28fcf9ea-7f63-4add-bb31-99af57fcce2c\") " pod="openstack-operators/placement-operator-controller-manager-589c58c6c-zbmbb"
Sep 30 10:04:34 crc kubenswrapper[4730]: I0930 10:04:34.024198 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-g7zc5"
Sep 30 10:04:34 crc kubenswrapper[4730]: I0930 10:04:34.033501 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p9nct\" (UniqueName: \"kubernetes.io/projected/f4fe55ab-8eac-4f9a-9f5f-c70a91fd261e-kube-api-access-p9nct\") pod \"ovn-operator-controller-manager-9976ff44c-tsgfd\" (UID: \"f4fe55ab-8eac-4f9a-9f5f-c70a91fd261e\") " pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-tsgfd"
Sep 30 10:04:34 crc kubenswrapper[4730]: I0930 10:04:34.034816 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-f66b554c6-djzc5"]
Sep 30 10:04:34 crc kubenswrapper[4730]: I0930 10:04:34.034912 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-f66b554c6-djzc5"
Sep 30 10:04:34 crc kubenswrapper[4730]: I0930 10:04:34.040397 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"test-operator-controller-manager-dockercfg-hh5jg"
Sep 30 10:04:34 crc kubenswrapper[4730]: I0930 10:04:34.089356 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hvncb\" (UniqueName: \"kubernetes.io/projected/f3fcce5a-2080-44f6-971c-d1bda3dd0fe0-kube-api-access-hvncb\") pod \"test-operator-controller-manager-f66b554c6-djzc5\" (UID: \"f3fcce5a-2080-44f6-971c-d1bda3dd0fe0\") " pod="openstack-operators/test-operator-controller-manager-f66b554c6-djzc5"
Sep 30 10:04:34 crc kubenswrapper[4730]: I0930 10:04:34.089470 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mszsm\" (UniqueName: \"kubernetes.io/projected/a081f8cc-4fb8-457c-84de-2c7ba2c84821-kube-api-access-mszsm\") pod \"swift-operator-controller-manager-bc7dc7bd9-dsvl6\" (UID: \"a081f8cc-4fb8-457c-84de-2c7ba2c84821\") " pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-dsvl6"
Sep 30 10:04:34 crc kubenswrapper[4730]: I0930 10:04:34.089511 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fxpxl\" (UniqueName: \"kubernetes.io/projected/e6074e1b-4192-43a7-b391-f4112d2486bf-kube-api-access-fxpxl\") pod \"telemetry-operator-controller-manager-b8d54b5d7-nwvfh\" (UID: \"e6074e1b-4192-43a7-b391-f4112d2486bf\") " pod="openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-nwvfh"
Sep 30 10:04:34 crc kubenswrapper[4730]: I0930 10:04:34.100430 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/watcher-operator-controller-manager-6c4b8dd4dc-tbxsc"]
Sep 30 10:04:34 crc kubenswrapper[4730]: I0930 10:04:34.101737 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-6c4b8dd4dc-tbxsc"
Sep 30 10:04:34 crc kubenswrapper[4730]: I0930 10:04:34.109370 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"watcher-operator-controller-manager-dockercfg-8gz4x"
Sep 30 10:04:34 crc kubenswrapper[4730]: I0930 10:04:34.115876 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-6c4b8dd4dc-tbxsc"]
Sep 30 10:04:34 crc kubenswrapper[4730]: I0930 10:04:34.131314 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-589c58c6c-zbmbb"
Sep 30 10:04:34 crc kubenswrapper[4730]: I0930 10:04:34.132275 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fxpxl\" (UniqueName: \"kubernetes.io/projected/e6074e1b-4192-43a7-b391-f4112d2486bf-kube-api-access-fxpxl\") pod \"telemetry-operator-controller-manager-b8d54b5d7-nwvfh\" (UID: \"e6074e1b-4192-43a7-b391-f4112d2486bf\") " pod="openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-nwvfh"
Sep 30 10:04:34 crc kubenswrapper[4730]: I0930 10:04:34.133934 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mszsm\" (UniqueName: \"kubernetes.io/projected/a081f8cc-4fb8-457c-84de-2c7ba2c84821-kube-api-access-mszsm\") pod \"swift-operator-controller-manager-bc7dc7bd9-dsvl6\" (UID: \"a081f8cc-4fb8-457c-84de-2c7ba2c84821\") " pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-dsvl6"
Sep 30 10:04:34 crc kubenswrapper[4730]: I0930 10:04:34.148880 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-manager-79b5487686-nzxks"]
Sep 30 10:04:34 crc kubenswrapper[4730]: I0930 10:04:34.150462 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-79b5487686-nzxks"
Sep 30 10:04:34 crc kubenswrapper[4730]: I0930 10:04:34.157072 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-lsn5t"
Sep 30 10:04:34 crc kubenswrapper[4730]: I0930 10:04:34.157305 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert"
Sep 30 10:04:34 crc kubenswrapper[4730]: I0930 10:04:34.179791 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-79b5487686-nzxks"]
Sep 30 10:04:34 crc kubenswrapper[4730]: I0930 10:04:34.184161 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-tsgfd"
Sep 30 10:04:34 crc kubenswrapper[4730]: I0930 10:04:34.193981 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/dc289df2-abec-4f24-a873-82523204cb2b-cert\") pod \"openstack-operator-controller-manager-79b5487686-nzxks\" (UID: \"dc289df2-abec-4f24-a873-82523204cb2b\") " pod="openstack-operators/openstack-operator-controller-manager-79b5487686-nzxks"
Sep 30 10:04:34 crc kubenswrapper[4730]: I0930 10:04:34.194215 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hvncb\" (UniqueName: \"kubernetes.io/projected/f3fcce5a-2080-44f6-971c-d1bda3dd0fe0-kube-api-access-hvncb\") pod \"test-operator-controller-manager-f66b554c6-djzc5\" (UID: \"f3fcce5a-2080-44f6-971c-d1bda3dd0fe0\") " pod="openstack-operators/test-operator-controller-manager-f66b554c6-djzc5"
Sep 30 10:04:34 crc kubenswrapper[4730]: I0930 10:04:34.194265 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mjxjq\" (UniqueName: \"kubernetes.io/projected/ba719558-c698-41e9-8b5e-a3449a6f9a7c-kube-api-access-mjxjq\") pod \"watcher-operator-controller-manager-6c4b8dd4dc-tbxsc\" (UID: \"ba719558-c698-41e9-8b5e-a3449a6f9a7c\") " pod="openstack-operators/watcher-operator-controller-manager-6c4b8dd4dc-tbxsc"
Sep 30 10:04:34 crc kubenswrapper[4730]: I0930 10:04:34.194375 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cnhss\" (UniqueName: \"kubernetes.io/projected/dc289df2-abec-4f24-a873-82523204cb2b-kube-api-access-cnhss\") pod \"openstack-operator-controller-manager-79b5487686-nzxks\" (UID: \"dc289df2-abec-4f24-a873-82523204cb2b\") " pod="openstack-operators/openstack-operator-controller-manager-79b5487686-nzxks"
Sep 30 10:04:34 crc kubenswrapper[4730]: I0930 10:04:34.203570 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-dsvl6"
Sep 30 10:04:34 crc kubenswrapper[4730]: I0930 10:04:34.212028 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-d4vnn"]
Sep 30 10:04:34 crc kubenswrapper[4730]: I0930 10:04:34.213424 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-d4vnn"
Sep 30 10:04:34 crc kubenswrapper[4730]: I0930 10:04:34.218035 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-controller-manager-dockercfg-wllzp"
Sep 30 10:04:34 crc kubenswrapper[4730]: I0930 10:04:34.218181 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-d4vnn"]
Sep 30 10:04:34 crc kubenswrapper[4730]: I0930 10:04:34.223306 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hvncb\" (UniqueName: \"kubernetes.io/projected/f3fcce5a-2080-44f6-971c-d1bda3dd0fe0-kube-api-access-hvncb\") pod \"test-operator-controller-manager-f66b554c6-djzc5\" (UID: \"f3fcce5a-2080-44f6-971c-d1bda3dd0fe0\") " pod="openstack-operators/test-operator-controller-manager-f66b554c6-djzc5"
Sep 30 10:04:34 crc kubenswrapper[4730]: I0930 10:04:34.258936 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-7d857cc749-rxq94"
Sep 30 10:04:34 crc kubenswrapper[4730]: I0930 10:04:34.272369 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-nwvfh"
Sep 30 10:04:34 crc kubenswrapper[4730]: I0930 10:04:34.295793 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mjxjq\" (UniqueName: \"kubernetes.io/projected/ba719558-c698-41e9-8b5e-a3449a6f9a7c-kube-api-access-mjxjq\") pod \"watcher-operator-controller-manager-6c4b8dd4dc-tbxsc\" (UID: \"ba719558-c698-41e9-8b5e-a3449a6f9a7c\") " pod="openstack-operators/watcher-operator-controller-manager-6c4b8dd4dc-tbxsc"
Sep 30 10:04:34 crc kubenswrapper[4730]: I0930 10:04:34.296486 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cnhss\" (UniqueName: \"kubernetes.io/projected/dc289df2-abec-4f24-a873-82523204cb2b-kube-api-access-cnhss\") pod \"openstack-operator-controller-manager-79b5487686-nzxks\" (UID: \"dc289df2-abec-4f24-a873-82523204cb2b\") " pod="openstack-operators/openstack-operator-controller-manager-79b5487686-nzxks"
Sep 30 10:04:34 crc kubenswrapper[4730]: I0930 10:04:34.296578 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v9tx6\" (UniqueName: \"kubernetes.io/projected/f26267a9-08cf-4ff8-8fab-d1bfe01dbd65-kube-api-access-v9tx6\") pod \"rabbitmq-cluster-operator-manager-79d8469568-d4vnn\" (UID: \"f26267a9-08cf-4ff8-8fab-d1bfe01dbd65\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-d4vnn"
Sep 30 10:04:34 crc kubenswrapper[4730]: I0930 10:04:34.296667 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/dc289df2-abec-4f24-a873-82523204cb2b-cert\") pod \"openstack-operator-controller-manager-79b5487686-nzxks\" (UID: \"dc289df2-abec-4f24-a873-82523204cb2b\") " pod="openstack-operators/openstack-operator-controller-manager-79b5487686-nzxks"
Sep 30 10:04:34 crc kubenswrapper[4730]: I0930 10:04:34.301971 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/dc289df2-abec-4f24-a873-82523204cb2b-cert\") pod \"openstack-operator-controller-manager-79b5487686-nzxks\" (UID: \"dc289df2-abec-4f24-a873-82523204cb2b\") " pod="openstack-operators/openstack-operator-controller-manager-79b5487686-nzxks"
Sep 30 10:04:34 crc kubenswrapper[4730]: I0930 10:04:34.326080 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cnhss\" (UniqueName: \"kubernetes.io/projected/dc289df2-abec-4f24-a873-82523204cb2b-kube-api-access-cnhss\") pod \"openstack-operator-controller-manager-79b5487686-nzxks\" (UID: \"dc289df2-abec-4f24-a873-82523204cb2b\") " pod="openstack-operators/openstack-operator-controller-manager-79b5487686-nzxks"
Sep 30 10:04:34 crc kubenswrapper[4730]: I0930 10:04:34.327438 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mjxjq\" (UniqueName: \"kubernetes.io/projected/ba719558-c698-41e9-8b5e-a3449a6f9a7c-kube-api-access-mjxjq\") pod \"watcher-operator-controller-manager-6c4b8dd4dc-tbxsc\" (UID: \"ba719558-c698-41e9-8b5e-a3449a6f9a7c\") " pod="openstack-operators/watcher-operator-controller-manager-6c4b8dd4dc-tbxsc"
Sep 30 10:04:34 crc kubenswrapper[4730]: I0930 10:04:34.377316 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-79b5487686-nzxks"
Sep 30 10:04:34 crc kubenswrapper[4730]: I0930 10:04:34.399690 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/b6969510-2750-4466-b064-7cb67a4acf7e-cert\") pod \"openstack-baremetal-operator-controller-manager-6d776955-b5jlg\" (UID: \"b6969510-2750-4466-b064-7cb67a4acf7e\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-b5jlg"
Sep 30 10:04:34 crc kubenswrapper[4730]: I0930 10:04:34.399793 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v9tx6\" (UniqueName: \"kubernetes.io/projected/f26267a9-08cf-4ff8-8fab-d1bfe01dbd65-kube-api-access-v9tx6\") pod \"rabbitmq-cluster-operator-manager-79d8469568-d4vnn\" (UID: \"f26267a9-08cf-4ff8-8fab-d1bfe01dbd65\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-d4vnn"
Sep 30 10:04:34 crc kubenswrapper[4730]: E0930 10:04:34.401654 4730 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found
Sep 30 10:04:34 crc kubenswrapper[4730]: E0930 10:04:34.401738 4730 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b6969510-2750-4466-b064-7cb67a4acf7e-cert podName:b6969510-2750-4466-b064-7cb67a4acf7e nodeName:}" failed. No retries permitted until 2025-09-30 10:04:35.401717397 +0000 UTC m=+919.734977400 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/b6969510-2750-4466-b064-7cb67a4acf7e-cert") pod "openstack-baremetal-operator-controller-manager-6d776955-b5jlg" (UID: "b6969510-2750-4466-b064-7cb67a4acf7e") : secret "openstack-baremetal-operator-webhook-server-cert" not found
Sep 30 10:04:34 crc kubenswrapper[4730]: I0930 10:04:34.429047 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v9tx6\" (UniqueName: \"kubernetes.io/projected/f26267a9-08cf-4ff8-8fab-d1bfe01dbd65-kube-api-access-v9tx6\") pod \"rabbitmq-cluster-operator-manager-79d8469568-d4vnn\" (UID: \"f26267a9-08cf-4ff8-8fab-d1bfe01dbd65\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-d4vnn"
Sep 30 10:04:34 crc kubenswrapper[4730]: I0930 10:04:34.514927 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-f66b554c6-djzc5"
Sep 30 10:04:34 crc kubenswrapper[4730]: I0930 10:04:34.574554 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-d4vnn"
Sep 30 10:04:34 crc kubenswrapper[4730]: I0930 10:04:34.601529 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-6c4b8dd4dc-tbxsc"
Sep 30 10:04:35 crc kubenswrapper[4730]: I0930 10:04:35.025027 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-644bddb6d8-qx68r"]
Sep 30 10:04:35 crc kubenswrapper[4730]: I0930 10:04:35.039721 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-84f4f7b77b-tglx9"]
Sep 30 10:04:35 crc kubenswrapper[4730]: I0930 10:04:35.042131 4730 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Sep 30 10:04:35 crc kubenswrapper[4730]: I0930 10:04:35.055295 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-5d889d78cf-p957g"]
Sep 30 10:04:35 crc kubenswrapper[4730]: I0930 10:04:35.330995 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-nwvfh"]
Sep 30 10:04:35 crc kubenswrapper[4730]: I0930 10:04:35.350781 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-6d68dbc695-bgczv"]
Sep 30 10:04:35 crc kubenswrapper[4730]: I0930 10:04:35.364960 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-64d7b59854-6v77r"]
Sep 30 10:04:35 crc kubenswrapper[4730]: I0930 10:04:35.389786 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-84958c4d49-jdfxz"]
Sep 30 10:04:35 crc kubenswrapper[4730]: W0930 10:04:35.403346 4730 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod27424124_82bf_42fa_a77b_fdbd44f5c24b.slice/crio-96e90d224f786f0c95947efed97f7781dcd84b5c761fbc59000900b1ebf27401 WatchSource:0}: Error finding container 96e90d224f786f0c95947efed97f7781dcd84b5c761fbc59000900b1ebf27401: Status 404 returned error can't find the container with id 96e90d224f786f0c95947efed97f7781dcd84b5c761fbc59000900b1ebf27401
Sep 30 10:04:35 crc kubenswrapper[4730]: I0930 10:04:35.403778 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-6ff8b75857-nk8jc"]
Sep 30 10:04:35 crc kubenswrapper[4730]: I0930 10:04:35.416570 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/b6969510-2750-4466-b064-7cb67a4acf7e-cert\") pod \"openstack-baremetal-operator-controller-manager-6d776955-b5jlg\" (UID: \"b6969510-2750-4466-b064-7cb67a4acf7e\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-b5jlg"
Sep 30 10:04:35 crc kubenswrapper[4730]: I0930 10:04:35.424550 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-7975b88857-72mvr"]
Sep 30 10:04:35 crc kubenswrapper[4730]: I0930 10:04:35.424835 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/b6969510-2750-4466-b064-7cb67a4acf7e-cert\") pod \"openstack-baremetal-operator-controller-manager-6d776955-b5jlg\" (UID: \"b6969510-2750-4466-b064-7cb67a4acf7e\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-b5jlg"
Sep 30 10:04:35 crc kubenswrapper[4730]: I0930 10:04:35.431138 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-7d857cc749-rxq94"]
Sep 30 10:04:35 crc kubenswrapper[4730]: I0930 10:04:35.443541 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-589c58c6c-zbmbb"]
Sep 30 10:04:35 crc kubenswrapper[4730]: W0930 10:04:35.457473 4730 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4201b1a7_e458_49b2_9536_91e6db49ea36.slice/crio-99b869c47b509e31131cc037e450d7e276d9e8cec8b4693e31a6d70b5a3bcddd WatchSource:0}: Error finding container 99b869c47b509e31131cc037e450d7e276d9e8cec8b4693e31a6d70b5a3bcddd: Status 404 returned error can't find the container with id 99b869c47b509e31131cc037e450d7e276d9e8cec8b4693e31a6d70b5a3bcddd
Sep 30 10:04:35 crc kubenswrapper[4730]: I0930 10:04:35.457680 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-c7c776c96-z5tb9"]
Sep 30 10:04:35 crc kubenswrapper[4730]: I0930 10:04:35.458329 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-64d7b59854-6v77r" event={"ID":"8c6c5d61-fd4e-4b83-9b60-7681c6fc19f3","Type":"ContainerStarted","Data":"e6fff631079407363ea7514e3b88a345d2fc38c33df224bc7d6d40e76aa9221d"}
Sep 30 10:04:35 crc kubenswrapper[4730]: I0930 10:04:35.460756 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-p957g" event={"ID":"419a8cdd-e50e-42f8-b913-61214be0a9a5","Type":"ContainerStarted","Data":"9804a701668594a6cc30f7ac475f327cade2c28be6af5187129e7a2ffb418156"}
Sep 30 10:04:35 crc kubenswrapper[4730]: I0930 10:04:35.465868 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-g7zc5"]
Sep 30 10:04:35 crc kubenswrapper[4730]: I0930 10:04:35.470873 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-tglx9" event={"ID":"2758692b-990d-4330-9765-22614cd379a0","Type":"ContainerStarted","Data":"aa6c91bcd48759d08a569923de27daef394591f586d3b89ca9d1408a0f69dce1"}
Sep 30 10:04:35 crc kubenswrapper[4730]: E0930 10:04:35.474049 4730 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/octavia-operator@sha256:4d08afd31dc5ded10c54a5541f514ac351e9b40a183285b3db27d0757a6354c8,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-jpkzs,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod octavia-operator-controller-manager-76fcc6dc7c-g7zc5_openstack-operators(0f10a085-8ce3-407b-a2ec-b6fabc38bc9f): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Sep 30 10:04:35 crc kubenswrapper[4730]: I0930 10:04:35.476529 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-88c7-pr4r7"]
Sep 30 10:04:35 crc kubenswrapper[4730]: I0930 10:04:35.477929 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-7975b88857-72mvr" event={"ID":"3cc5b4f8-09e7-44a7-aa40-f173ad8fb157","Type":"ContainerStarted","Data":"4097bd03feb363106cdc8b79f7b23646ce0218f43415ee8a34734d6644dbb251"}
Sep 30 10:04:35 crc kubenswrapper[4730]: E0930 10:04:35.480386 4730 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/placement-operator@sha256:a6b3408d79df6b6d4a467e49defaa4a9d9c088c94d0605a4fee0030c9ccc84d2,Command:[/manager],Args:[--health-probe-bind-address=:8081
--metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-t7snm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod placement-operator-controller-manager-589c58c6c-zbmbb_openstack-operators(28fcf9ea-7f63-4add-bb31-99af57fcce2c): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Sep 30 10:04:35 crc kubenswrapper[4730]: I0930 10:04:35.485857 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-d4vnn"] Sep 30 10:04:35 crc kubenswrapper[4730]: I0930 10:04:35.489565 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-qx68r" event={"ID":"04589829-1e63-438e-b6e8-bdaa6f5ebc19","Type":"ContainerStarted","Data":"676de152e469cb39a2fdeeed5d3cd6300b203b4d599dfbd0fc784af2449774f0"} Sep 30 10:04:35 crc kubenswrapper[4730]: I0930 10:04:35.490748 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-nk8jc" event={"ID":"978da32e-9bbe-453d-ba3f-32a89f23550e","Type":"ContainerStarted","Data":"85102ca47c3a2f96dbd04f35c5bc94fa403c6357796a753d545b7cf12c691a99"} Sep 30 10:04:35 crc kubenswrapper[4730]: I0930 10:04:35.493644 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-84958c4d49-jdfxz" event={"ID":"0f748696-3e59-4b53-a5d2-1dce4b0b6a3a","Type":"ContainerStarted","Data":"d16d74e7bb6a1f506771e2efb8fa171edf795c6397e5aa9fc796c8ec95a2e705"} Sep 30 10:04:35 crc kubenswrapper[4730]: I0930 10:04:35.494066 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openstack-operators/keystone-operator-controller-manager-5bd55b4bff-qsgdr"] Sep 30 10:04:35 crc kubenswrapper[4730]: I0930 10:04:35.494688 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-nwvfh" event={"ID":"e6074e1b-4192-43a7-b391-f4112d2486bf","Type":"ContainerStarted","Data":"6d061ca30e3af4a580c015d863de823955e36c2790d8201b4358688a818feeeb"} Sep 30 10:04:35 crc kubenswrapper[4730]: I0930 10:04:35.499726 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-9f4696d94-mwgsf"] Sep 30 10:04:35 crc kubenswrapper[4730]: I0930 10:04:35.506563 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-79b5487686-nzxks"] Sep 30 10:04:35 crc kubenswrapper[4730]: E0930 10:04:35.522675 4730 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:operator,Image:quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:225524223bf2a7f3a4ce95958fc9ca6fdab02745fb70374e8ff5bf1ddaceda4b,Command:[/manager],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:9782,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-v9tx6,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cluster-operator-manager-79d8469568-d4vnn_openstack-operators(f26267a9-08cf-4ff8-8fab-d1bfe01dbd65): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Sep 30 10:04:35 crc kubenswrapper[4730]: E0930 10:04:35.523835 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-d4vnn" podUID="f26267a9-08cf-4ff8-8fab-d1bfe01dbd65" Sep 30 10:04:35 crc kubenswrapper[4730]: E0930 10:04:35.530529 4730 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:manager,Image:quay.io/openstack-k8s-operators/keystone-operator@sha256:23fcec0642cbd40af10bca0c5d4e538662d21eda98d6dfec37c38b4d7a47191a,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-7qb78,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod keystone-operator-controller-manager-5bd55b4bff-qsgdr_openstack-operators(c24a4e1a-10db-44f2-9de6-16f4081a5609): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Sep 30 10:04:35 crc kubenswrapper[4730]: E0930 10:04:35.530731 4730 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/horizon-operator@sha256:f5f0d2eb534f763cf6578af513add1c21c1659b2cd75214dfddfedb9eebf6397,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-z8xz6,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-operator-controller-manager-9f4696d94-mwgsf_openstack-operators(db4dd5c5-bcc5-4782-acf0-42d686edd287): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Sep 30 10:04:35 crc kubenswrapper[4730]: I0930 10:04:35.541641 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-9976ff44c-tsgfd"] Sep 30 10:04:35 crc kubenswrapper[4730]: I0930 10:04:35.551545 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-bc7dc7bd9-dsvl6"] Sep 30 10:04:35 crc kubenswrapper[4730]: E0930 10:04:35.557756 4730 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/ovn-operator@sha256:1051afc168038fb814f75e7a5f07c588b295a83ebd143dcd8b46d799e31ad302,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-p9nct,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovn-operator-controller-manager-9976ff44c-tsgfd_openstack-operators(f4fe55ab-8eac-4f9a-9f5f-c70a91fd261e): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Sep 30 10:04:35 crc kubenswrapper[4730]: I0930 10:04:35.571527 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-6c4b8dd4dc-tbxsc"] Sep 30 10:04:35 crc kubenswrapper[4730]: I0930 10:04:35.589341 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-b5jlg" Sep 30 10:04:35 crc kubenswrapper[4730]: E0930 10:04:35.610436 4730 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:38.102.83.176:5001/openstack-k8s-operators/watcher-operator:11848010d1f648a21277d86e899ab9f735539cf1,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-mjxjq,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-operator-controller-manager-6c4b8dd4dc-tbxsc_openstack-operators(ba719558-c698-41e9-8b5e-a3449a6f9a7c): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Sep 30 10:04:35 crc kubenswrapper[4730]: E0930 10:04:35.610386 4730 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/swift-operator@sha256:3c6f7d737e0196ec302f44354228d783ad3b210a75703dda3b39c15c01a67e8c,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-mszsm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod swift-operator-controller-manager-bc7dc7bd9-dsvl6_openstack-operators(a081f8cc-4fb8-457c-84de-2c7ba2c84821): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Sep 30 10:04:35 crc kubenswrapper[4730]: I0930 10:04:35.611386 4730 
kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-f66b554c6-djzc5"] Sep 30 10:04:35 crc kubenswrapper[4730]: E0930 10:04:35.684447 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-g7zc5" podUID="0f10a085-8ce3-407b-a2ec-b6fabc38bc9f" Sep 30 10:04:35 crc kubenswrapper[4730]: E0930 10:04:35.751241 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/placement-operator-controller-manager-589c58c6c-zbmbb" podUID="28fcf9ea-7f63-4add-bb31-99af57fcce2c" Sep 30 10:04:35 crc kubenswrapper[4730]: E0930 10:04:35.859769 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-qsgdr" podUID="c24a4e1a-10db-44f2-9de6-16f4081a5609" Sep 30 10:04:35 crc kubenswrapper[4730]: E0930 10:04:35.933284 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-tsgfd" podUID="f4fe55ab-8eac-4f9a-9f5f-c70a91fd261e" Sep 30 10:04:35 crc kubenswrapper[4730]: E0930 10:04:35.935130 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-mwgsf" podUID="db4dd5c5-bcc5-4782-acf0-42d686edd287" Sep 30 10:04:35 crc kubenswrapper[4730]: I0930 10:04:35.995958 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-b5jlg"] Sep 30 10:04:36 crc kubenswrapper[4730]: E0930 10:04:36.008597 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-dsvl6" podUID="a081f8cc-4fb8-457c-84de-2c7ba2c84821" Sep 30 10:04:36 crc kubenswrapper[4730]: E0930 10:04:36.023345 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/watcher-operator-controller-manager-6c4b8dd4dc-tbxsc" podUID="ba719558-c698-41e9-8b5e-a3449a6f9a7c" Sep 30 10:04:36 crc kubenswrapper[4730]: I0930 10:04:36.550963 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-g7zc5" event={"ID":"0f10a085-8ce3-407b-a2ec-b6fabc38bc9f","Type":"ContainerStarted","Data":"96a6a96be9448d38126a778b8136f7e9ec954e1c6b137c83a22f4cccb624cd77"} Sep 30 10:04:36 crc kubenswrapper[4730]: I0930 10:04:36.551313 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-g7zc5" event={"ID":"0f10a085-8ce3-407b-a2ec-b6fabc38bc9f","Type":"ContainerStarted","Data":"286ad221b274a99aef53ce922fe29d87b14cb644c07373f1e5bd64b7d2fdf2e6"} Sep 30 10:04:36 crc kubenswrapper[4730]: E0930 10:04:36.553687 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with 
ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/octavia-operator@sha256:4d08afd31dc5ded10c54a5541f514ac351e9b40a183285b3db27d0757a6354c8\\\"\"" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-g7zc5" podUID="0f10a085-8ce3-407b-a2ec-b6fabc38bc9f" Sep 30 10:04:36 crc kubenswrapper[4730]: I0930 10:04:36.554354 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-7d857cc749-rxq94" event={"ID":"4201b1a7-e458-49b2-9536-91e6db49ea36","Type":"ContainerStarted","Data":"99b869c47b509e31131cc037e450d7e276d9e8cec8b4693e31a6d70b5a3bcddd"} Sep 30 10:04:36 crc kubenswrapper[4730]: I0930 10:04:36.574750 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-6c4b8dd4dc-tbxsc" event={"ID":"ba719558-c698-41e9-8b5e-a3449a6f9a7c","Type":"ContainerStarted","Data":"8f85841af658d1ee4b78f9ef13bd7002330613a3070fd786aac710b09390a5c8"} Sep 30 10:04:36 crc kubenswrapper[4730]: I0930 10:04:36.574805 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-6c4b8dd4dc-tbxsc" event={"ID":"ba719558-c698-41e9-8b5e-a3449a6f9a7c","Type":"ContainerStarted","Data":"cd4d2534865d24024a9d4a6f51cefb44ee8392de7125b2d8e600262574687b4a"} Sep 30 10:04:36 crc kubenswrapper[4730]: E0930 10:04:36.576039 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.176:5001/openstack-k8s-operators/watcher-operator:11848010d1f648a21277d86e899ab9f735539cf1\\\"\"" pod="openstack-operators/watcher-operator-controller-manager-6c4b8dd4dc-tbxsc" podUID="ba719558-c698-41e9-8b5e-a3449a6f9a7c" Sep 30 10:04:36 crc kubenswrapper[4730]: I0930 10:04:36.579126 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-f66b554c6-djzc5" event={"ID":"f3fcce5a-2080-44f6-971c-d1bda3dd0fe0","Type":"ContainerStarted","Data":"d6109af7fd9ff3b16be698b97d6d15a30da41a73fb17fbe777b987f40ef5a6a9"} Sep 30 10:04:36 crc kubenswrapper[4730]: I0930 10:04:36.582100 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-589c58c6c-zbmbb" event={"ID":"28fcf9ea-7f63-4add-bb31-99af57fcce2c","Type":"ContainerStarted","Data":"0424d9b32ba6a82b76ff50438e7fec7646b577fd02f85c2784adc53ee115f1d7"} Sep 30 10:04:36 crc kubenswrapper[4730]: I0930 10:04:36.582134 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-589c58c6c-zbmbb" event={"ID":"28fcf9ea-7f63-4add-bb31-99af57fcce2c","Type":"ContainerStarted","Data":"4e41c598885abfc0136e7cd401c529625bfa64b1bfe22e4db552ebd346bd387e"} Sep 30 10:04:36 crc kubenswrapper[4730]: E0930 10:04:36.583750 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/placement-operator@sha256:a6b3408d79df6b6d4a467e49defaa4a9d9c088c94d0605a4fee0030c9ccc84d2\\\"\"" pod="openstack-operators/placement-operator-controller-manager-589c58c6c-zbmbb" podUID="28fcf9ea-7f63-4add-bb31-99af57fcce2c" Sep 30 10:04:36 crc kubenswrapper[4730]: I0930 10:04:36.588077 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-qsgdr" 
event={"ID":"c24a4e1a-10db-44f2-9de6-16f4081a5609","Type":"ContainerStarted","Data":"a4d10df3cd009144951472de8458333f5001651ac11e6fb805c710fce09d1168"} Sep 30 10:04:36 crc kubenswrapper[4730]: I0930 10:04:36.588108 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-qsgdr" event={"ID":"c24a4e1a-10db-44f2-9de6-16f4081a5609","Type":"ContainerStarted","Data":"a971a0996e8c5984895c3fb41283de47b393f61f852d765bbb4892f08bc1da1a"} Sep 30 10:04:36 crc kubenswrapper[4730]: I0930 10:04:36.610360 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-bgczv" event={"ID":"27424124-82bf-42fa-a77b-fdbd44f5c24b","Type":"ContainerStarted","Data":"96e90d224f786f0c95947efed97f7781dcd84b5c761fbc59000900b1ebf27401"} Sep 30 10:04:36 crc kubenswrapper[4730]: E0930 10:04:36.612448 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/keystone-operator@sha256:23fcec0642cbd40af10bca0c5d4e538662d21eda98d6dfec37c38b4d7a47191a\\\"\"" pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-qsgdr" podUID="c24a4e1a-10db-44f2-9de6-16f4081a5609" Sep 30 10:04:36 crc kubenswrapper[4730]: I0930 10:04:36.614147 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-b5jlg" event={"ID":"b6969510-2750-4466-b064-7cb67a4acf7e","Type":"ContainerStarted","Data":"2970b79ff723f7c2ebbdf2aa16b276c14a13d771bd3006cf4020587e4a66a933"} Sep 30 10:04:36 crc kubenswrapper[4730]: I0930 10:04:36.616506 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-dsvl6" event={"ID":"a081f8cc-4fb8-457c-84de-2c7ba2c84821","Type":"ContainerStarted","Data":"01758422e40170cb4acd365d234049c7db668bb56679a90453ff1baa4128a597"} Sep 30 10:04:36 crc kubenswrapper[4730]: I0930 10:04:36.616556 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-dsvl6" event={"ID":"a081f8cc-4fb8-457c-84de-2c7ba2c84821","Type":"ContainerStarted","Data":"8866a36c2630dfe3a17dc23825ec66d1b5a94db9059439f3d518783dc7b7e31f"} Sep 30 10:04:36 crc kubenswrapper[4730]: E0930 10:04:36.619489 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:3c6f7d737e0196ec302f44354228d783ad3b210a75703dda3b39c15c01a67e8c\\\"\"" pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-dsvl6" podUID="a081f8cc-4fb8-457c-84de-2c7ba2c84821" Sep 30 10:04:36 crc kubenswrapper[4730]: I0930 10:04:36.622189 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-88c7-pr4r7" event={"ID":"81c8b722-d28f-42d4-8bc0-b82b9eb34500","Type":"ContainerStarted","Data":"da6064c52dbeee033c2c73302308303cd9c1e26caaa03a532a01143576dfe0c3"} Sep 30 10:04:36 crc kubenswrapper[4730]: I0930 10:04:36.627509 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-d4vnn" event={"ID":"f26267a9-08cf-4ff8-8fab-d1bfe01dbd65","Type":"ContainerStarted","Data":"741ea72a8f03f2d4d01c075a795ce48efd9aae68c101992eab0fc593d06433ce"} Sep 30 10:04:36 
crc kubenswrapper[4730]: I0930 10:04:36.629832 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-c7c776c96-z5tb9" event={"ID":"7ed8122e-042b-4574-9522-99557d55eedc","Type":"ContainerStarted","Data":"34aadc21748bea748cd07c257bf192c30be248b7b764ccbd2672bdd6126c21d7"} Sep 30 10:04:36 crc kubenswrapper[4730]: I0930 10:04:36.632305 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-79b5487686-nzxks" event={"ID":"dc289df2-abec-4f24-a873-82523204cb2b","Type":"ContainerStarted","Data":"c3369b6946886574891e0f80df7ae4eca2a2924cff866edc9014fbcdc3683381"} Sep 30 10:04:36 crc kubenswrapper[4730]: I0930 10:04:36.632356 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-79b5487686-nzxks" event={"ID":"dc289df2-abec-4f24-a873-82523204cb2b","Type":"ContainerStarted","Data":"e4508b0dc884912d21626b66f5d26d50d58482a4715c80611f95b3a36bfb149d"} Sep 30 10:04:36 crc kubenswrapper[4730]: I0930 10:04:36.632369 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-79b5487686-nzxks" event={"ID":"dc289df2-abec-4f24-a873-82523204cb2b","Type":"ContainerStarted","Data":"722d58f2882e88703edba748ab939777c223164e9fe0558ec4296fa7381d1d2f"} Sep 30 10:04:36 crc kubenswrapper[4730]: I0930 10:04:36.633739 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-manager-79b5487686-nzxks" Sep 30 10:04:36 crc kubenswrapper[4730]: E0930 10:04:36.636479 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:225524223bf2a7f3a4ce95958fc9ca6fdab02745fb70374e8ff5bf1ddaceda4b\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-d4vnn" podUID="f26267a9-08cf-4ff8-8fab-d1bfe01dbd65" Sep 30 10:04:36 crc kubenswrapper[4730]: I0930 10:04:36.651447 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-mwgsf" event={"ID":"db4dd5c5-bcc5-4782-acf0-42d686edd287","Type":"ContainerStarted","Data":"edb08d8c748f0f7ae61327df1779b1c16ab61e3cfa7e01f92d215015051355ed"} Sep 30 10:04:36 crc kubenswrapper[4730]: I0930 10:04:36.651501 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-mwgsf" event={"ID":"db4dd5c5-bcc5-4782-acf0-42d686edd287","Type":"ContainerStarted","Data":"875a1dc2dbe6b45434513b9c9b3e57868384cc9ce3ca64a2f23369ea6ca2fa9a"} Sep 30 10:04:36 crc kubenswrapper[4730]: E0930 10:04:36.652706 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/horizon-operator@sha256:f5f0d2eb534f763cf6578af513add1c21c1659b2cd75214dfddfedb9eebf6397\\\"\"" pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-mwgsf" podUID="db4dd5c5-bcc5-4782-acf0-42d686edd287" Sep 30 10:04:36 crc kubenswrapper[4730]: I0930 10:04:36.654963 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-tsgfd" 
event={"ID":"f4fe55ab-8eac-4f9a-9f5f-c70a91fd261e","Type":"ContainerStarted","Data":"1e1d02e0c769ba1142ef91fa305282d1a82563cc2d77c99ffd0beaf5466e5548"} Sep 30 10:04:36 crc kubenswrapper[4730]: I0930 10:04:36.654999 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-tsgfd" event={"ID":"f4fe55ab-8eac-4f9a-9f5f-c70a91fd261e","Type":"ContainerStarted","Data":"814bbda169490f308116b1a91a6b0cbd9b0414bb63a10d5afa223249a552c673"} Sep 30 10:04:36 crc kubenswrapper[4730]: E0930 10:04:36.656456 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/ovn-operator@sha256:1051afc168038fb814f75e7a5f07c588b295a83ebd143dcd8b46d799e31ad302\\\"\"" pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-tsgfd" podUID="f4fe55ab-8eac-4f9a-9f5f-c70a91fd261e" Sep 30 10:04:37 crc kubenswrapper[4730]: I0930 10:04:37.239167 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-manager-79b5487686-nzxks" podStartSLOduration=4.239149091 podStartE2EDuration="4.239149091s" podCreationTimestamp="2025-09-30 10:04:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 10:04:37.237782964 +0000 UTC m=+921.571042967" watchObservedRunningTime="2025-09-30 10:04:37.239149091 +0000 UTC m=+921.572409094" Sep 30 10:04:37 crc kubenswrapper[4730]: E0930 10:04:37.676204 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/ovn-operator@sha256:1051afc168038fb814f75e7a5f07c588b295a83ebd143dcd8b46d799e31ad302\\\"\"" pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-tsgfd" podUID="f4fe55ab-8eac-4f9a-9f5f-c70a91fd261e" Sep 30 10:04:37 crc kubenswrapper[4730]: E0930 10:04:37.676682 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/octavia-operator@sha256:4d08afd31dc5ded10c54a5541f514ac351e9b40a183285b3db27d0757a6354c8\\\"\"" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-g7zc5" podUID="0f10a085-8ce3-407b-a2ec-b6fabc38bc9f" Sep 30 10:04:37 crc kubenswrapper[4730]: E0930 10:04:37.676751 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.176:5001/openstack-k8s-operators/watcher-operator:11848010d1f648a21277d86e899ab9f735539cf1\\\"\"" pod="openstack-operators/watcher-operator-controller-manager-6c4b8dd4dc-tbxsc" podUID="ba719558-c698-41e9-8b5e-a3449a6f9a7c" Sep 30 10:04:37 crc kubenswrapper[4730]: E0930 10:04:37.676749 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/placement-operator@sha256:a6b3408d79df6b6d4a467e49defaa4a9d9c088c94d0605a4fee0030c9ccc84d2\\\"\"" pod="openstack-operators/placement-operator-controller-manager-589c58c6c-zbmbb" podUID="28fcf9ea-7f63-4add-bb31-99af57fcce2c" Sep 30 10:04:37 crc kubenswrapper[4730]: E0930 10:04:37.676796 4730 pod_workers.go:1301] "Error syncing pod, skipping" 
err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:225524223bf2a7f3a4ce95958fc9ca6fdab02745fb70374e8ff5bf1ddaceda4b\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-d4vnn" podUID="f26267a9-08cf-4ff8-8fab-d1bfe01dbd65" Sep 30 10:04:37 crc kubenswrapper[4730]: E0930 10:04:37.676808 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:3c6f7d737e0196ec302f44354228d783ad3b210a75703dda3b39c15c01a67e8c\\\"\"" pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-dsvl6" podUID="a081f8cc-4fb8-457c-84de-2c7ba2c84821" Sep 30 10:04:37 crc kubenswrapper[4730]: E0930 10:04:37.676847 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/keystone-operator@sha256:23fcec0642cbd40af10bca0c5d4e538662d21eda98d6dfec37c38b4d7a47191a\\\"\"" pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-qsgdr" podUID="c24a4e1a-10db-44f2-9de6-16f4081a5609" Sep 30 10:04:37 crc kubenswrapper[4730]: E0930 10:04:37.676856 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/horizon-operator@sha256:f5f0d2eb534f763cf6578af513add1c21c1659b2cd75214dfddfedb9eebf6397\\\"\"" pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-mwgsf" podUID="db4dd5c5-bcc5-4782-acf0-42d686edd287" Sep 30 10:04:44 crc kubenswrapper[4730]: I0930 10:04:44.399952 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-manager-79b5487686-nzxks" Sep 30 10:04:48 crc kubenswrapper[4730]: I0930 10:04:48.841338 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-bgczv" event={"ID":"27424124-82bf-42fa-a77b-fdbd44f5c24b","Type":"ContainerStarted","Data":"82fc009aa135944a5453b2ebb71c2079d2ab6e5ca0a551eeea8aedccd4455cfe"} Sep 30 10:04:48 crc kubenswrapper[4730]: I0930 10:04:48.843297 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-b5jlg" event={"ID":"b6969510-2750-4466-b064-7cb67a4acf7e","Type":"ContainerStarted","Data":"213ba28a70d167378fb5dab1e08d3c556f9222c78da20101f3d1a9aaf19392c3"} Sep 30 10:04:48 crc kubenswrapper[4730]: I0930 10:04:48.844538 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-88c7-pr4r7" event={"ID":"81c8b722-d28f-42d4-8bc0-b82b9eb34500","Type":"ContainerStarted","Data":"aad06cb33b75ff3be34701cd8272e7efcffcec69b8996a72d2d68fd1e1e624e3"} Sep 30 10:04:48 crc kubenswrapper[4730]: I0930 10:04:48.845914 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-qx68r" event={"ID":"04589829-1e63-438e-b6e8-bdaa6f5ebc19","Type":"ContainerStarted","Data":"40ad59a013fd2e0020c0f0e5899936ffee07d48ec296cde0fccac9fd6e76e85e"} Sep 30 10:04:48 crc kubenswrapper[4730]: I0930 10:04:48.868860 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/infra-operator-controller-manager-7d857cc749-rxq94" event={"ID":"4201b1a7-e458-49b2-9536-91e6db49ea36","Type":"ContainerStarted","Data":"7b280579682ced81cfaa8f69b9d61176dd8a2b0030e4b2a9e2a9a56c4df667b5"} Sep 30 10:04:48 crc kubenswrapper[4730]: I0930 10:04:48.899564 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-nwvfh" event={"ID":"e6074e1b-4192-43a7-b391-f4112d2486bf","Type":"ContainerStarted","Data":"fe2443373c0345097a8954112316e72e7b8016919a2c128773c21afabd33dfe9"} Sep 30 10:04:48 crc kubenswrapper[4730]: I0930 10:04:48.900537 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-f66b554c6-djzc5" event={"ID":"f3fcce5a-2080-44f6-971c-d1bda3dd0fe0","Type":"ContainerStarted","Data":"b2146a3d592be7d21605643ee3dfb429d0d521bc160fad3c91cea4061c20c794"} Sep 30 10:04:48 crc kubenswrapper[4730]: I0930 10:04:48.901631 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-84958c4d49-jdfxz" event={"ID":"0f748696-3e59-4b53-a5d2-1dce4b0b6a3a","Type":"ContainerStarted","Data":"86eda86afe493284397655d7b060b009996158763530b3c919735439bed3d221"} Sep 30 10:04:48 crc kubenswrapper[4730]: I0930 10:04:48.902466 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-c7c776c96-z5tb9" event={"ID":"7ed8122e-042b-4574-9522-99557d55eedc","Type":"ContainerStarted","Data":"932b1abefb3b191037973b7eefb50a1a67ae3972f5b1b789ff5106da83ddbae2"} Sep 30 10:04:48 crc kubenswrapper[4730]: I0930 10:04:48.903483 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-tglx9" event={"ID":"2758692b-990d-4330-9765-22614cd379a0","Type":"ContainerStarted","Data":"a850b55c4a0d37ab65f65a97c920726a720322cf3249b16303bcd089716b1131"} Sep 30 10:04:48 crc kubenswrapper[4730]: I0930 10:04:48.931020 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-nk8jc" event={"ID":"978da32e-9bbe-453d-ba3f-32a89f23550e","Type":"ContainerStarted","Data":"5c567db2be4c11c6bc3466b24e576d73de9d197fc3194f7ec98edb3788ae7164"} Sep 30 10:04:48 crc kubenswrapper[4730]: I0930 10:04:48.931065 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-nk8jc" event={"ID":"978da32e-9bbe-453d-ba3f-32a89f23550e","Type":"ContainerStarted","Data":"a3fe471e4e2e92907ebcf3f79c101e493c4b746189c47b12632184a305d25cf8"} Sep 30 10:04:48 crc kubenswrapper[4730]: I0930 10:04:48.931328 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-nk8jc" Sep 30 10:04:48 crc kubenswrapper[4730]: I0930 10:04:48.934702 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-64d7b59854-6v77r" event={"ID":"8c6c5d61-fd4e-4b83-9b60-7681c6fc19f3","Type":"ContainerStarted","Data":"9679cc6721d69e012bbf1e7a4608e462a186b14c6fb4f04e07d2b4bbf1f9d57e"} Sep 30 10:04:48 crc kubenswrapper[4730]: I0930 10:04:48.942438 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-p957g" 
event={"ID":"419a8cdd-e50e-42f8-b913-61214be0a9a5","Type":"ContainerStarted","Data":"a729bd6675a8a2d5d0d71cd5a498f01541b1926be36ea46faeaca0f962721a3a"} Sep 30 10:04:48 crc kubenswrapper[4730]: I0930 10:04:48.943116 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-p957g" Sep 30 10:04:48 crc kubenswrapper[4730]: I0930 10:04:48.967396 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-nk8jc" podStartSLOduration=3.410389613 podStartE2EDuration="15.96738225s" podCreationTimestamp="2025-09-30 10:04:33 +0000 UTC" firstStartedPulling="2025-09-30 10:04:35.406854842 +0000 UTC m=+919.740114835" lastFinishedPulling="2025-09-30 10:04:47.963847479 +0000 UTC m=+932.297107472" observedRunningTime="2025-09-30 10:04:48.96333085 +0000 UTC m=+933.296590843" watchObservedRunningTime="2025-09-30 10:04:48.96738225 +0000 UTC m=+933.300642243" Sep 30 10:04:48 crc kubenswrapper[4730]: I0930 10:04:48.994954 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-7975b88857-72mvr" event={"ID":"3cc5b4f8-09e7-44a7-aa40-f173ad8fb157","Type":"ContainerStarted","Data":"afcf79dd8a351e05db99167abe7f10eac7cd8b83e743a55b360d2f7bb002a4c3"} Sep 30 10:04:49 crc kubenswrapper[4730]: I0930 10:04:49.111404 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-p957g" podStartSLOduration=3.280859699 podStartE2EDuration="16.111386357s" podCreationTimestamp="2025-09-30 10:04:33 +0000 UTC" firstStartedPulling="2025-09-30 10:04:35.061214123 +0000 UTC m=+919.394474116" lastFinishedPulling="2025-09-30 10:04:47.891740781 +0000 UTC m=+932.225000774" observedRunningTime="2025-09-30 10:04:49.104993824 +0000 UTC m=+933.438253827" watchObservedRunningTime="2025-09-30 10:04:49.111386357 +0000 UTC m=+933.444646350" Sep 30 10:04:50 crc kubenswrapper[4730]: I0930 10:04:50.017324 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-84958c4d49-jdfxz" event={"ID":"0f748696-3e59-4b53-a5d2-1dce4b0b6a3a","Type":"ContainerStarted","Data":"30ff44751a039dd6050165070624a0957fbc2f73a234b790fcde0befbe02340d"} Sep 30 10:04:50 crc kubenswrapper[4730]: I0930 10:04:50.024724 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/glance-operator-controller-manager-84958c4d49-jdfxz" Sep 30 10:04:50 crc kubenswrapper[4730]: I0930 10:04:50.024778 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/nova-operator-controller-manager-c7c776c96-z5tb9" Sep 30 10:04:50 crc kubenswrapper[4730]: I0930 10:04:50.024789 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-c7c776c96-z5tb9" event={"ID":"7ed8122e-042b-4574-9522-99557d55eedc","Type":"ContainerStarted","Data":"537eee7e5e16d546949227bcb4e9ef709b241d29df8d04a0a598a2d064932cfd"} Sep 30 10:04:50 crc kubenswrapper[4730]: I0930 10:04:50.025593 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-qx68r" event={"ID":"04589829-1e63-438e-b6e8-bdaa6f5ebc19","Type":"ContainerStarted","Data":"c1ac886a4c856b58d676339ad9fd189a7276c01c54277a3c800fdd9720d25c3f"} Sep 30 10:04:50 crc kubenswrapper[4730]: I0930 10:04:50.025711 4730 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-qx68r" Sep 30 10:04:50 crc kubenswrapper[4730]: I0930 10:04:50.027482 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-p957g" event={"ID":"419a8cdd-e50e-42f8-b913-61214be0a9a5","Type":"ContainerStarted","Data":"2f2f84518c90dcf16e1fc825b1192678ab0c3bed081c2055f2d2a4a7641d9991"} Sep 30 10:04:50 crc kubenswrapper[4730]: I0930 10:04:50.029974 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-7975b88857-72mvr" event={"ID":"3cc5b4f8-09e7-44a7-aa40-f173ad8fb157","Type":"ContainerStarted","Data":"3bc9a337a0aa63c6d520b7bc6a966ffa95660253bda78cdd54bb3947128a64bb"} Sep 30 10:04:50 crc kubenswrapper[4730]: I0930 10:04:50.030056 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ironic-operator-controller-manager-7975b88857-72mvr" Sep 30 10:04:50 crc kubenswrapper[4730]: I0930 10:04:50.032105 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-bgczv" event={"ID":"27424124-82bf-42fa-a77b-fdbd44f5c24b","Type":"ContainerStarted","Data":"ac44203d438607ad39043c8cedb61a35f64ea40acf89b91ee01fabfe46af7cfa"} Sep 30 10:04:50 crc kubenswrapper[4730]: I0930 10:04:50.032230 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-bgczv" Sep 30 10:04:50 crc kubenswrapper[4730]: I0930 10:04:50.035814 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-b5jlg" event={"ID":"b6969510-2750-4466-b064-7cb67a4acf7e","Type":"ContainerStarted","Data":"f9951fb92c3a2ad4d741bc0cdcb501e8ee2b43157dbe6229e63bc5d75c5f4f6f"} Sep 30 10:04:50 crc kubenswrapper[4730]: I0930 10:04:50.035852 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-b5jlg" Sep 30 10:04:50 crc kubenswrapper[4730]: I0930 10:04:50.103570 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/glance-operator-controller-manager-84958c4d49-jdfxz" podStartSLOduration=4.537157468 podStartE2EDuration="17.10355596s" podCreationTimestamp="2025-09-30 10:04:33 +0000 UTC" firstStartedPulling="2025-09-30 10:04:35.364159223 +0000 UTC m=+919.697419216" lastFinishedPulling="2025-09-30 10:04:47.930557715 +0000 UTC m=+932.263817708" observedRunningTime="2025-09-30 10:04:50.099764577 +0000 UTC m=+934.433024570" watchObservedRunningTime="2025-09-30 10:04:50.10355596 +0000 UTC m=+934.436815953" Sep 30 10:04:50 crc kubenswrapper[4730]: I0930 10:04:50.184599 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ironic-operator-controller-manager-7975b88857-72mvr" podStartSLOduration=4.666838246 podStartE2EDuration="17.184580088s" podCreationTimestamp="2025-09-30 10:04:33 +0000 UTC" firstStartedPulling="2025-09-30 10:04:35.406221734 +0000 UTC m=+919.739481727" lastFinishedPulling="2025-09-30 10:04:47.923963576 +0000 UTC m=+932.257223569" observedRunningTime="2025-09-30 10:04:50.129021561 +0000 UTC m=+934.462281554" watchObservedRunningTime="2025-09-30 10:04:50.184580088 +0000 UTC m=+934.517840081" Sep 30 10:04:50 crc kubenswrapper[4730]: I0930 
10:04:50.184703 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-bgczv" podStartSLOduration=4.722419485 podStartE2EDuration="17.184699732s" podCreationTimestamp="2025-09-30 10:04:33 +0000 UTC" firstStartedPulling="2025-09-30 10:04:35.456324584 +0000 UTC m=+919.789584577" lastFinishedPulling="2025-09-30 10:04:47.918604831 +0000 UTC m=+932.251864824" observedRunningTime="2025-09-30 10:04:50.180938149 +0000 UTC m=+934.514198142" watchObservedRunningTime="2025-09-30 10:04:50.184699732 +0000 UTC m=+934.517959725" Sep 30 10:04:50 crc kubenswrapper[4730]: I0930 10:04:50.250216 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/nova-operator-controller-manager-c7c776c96-z5tb9" podStartSLOduration=4.793303638 podStartE2EDuration="17.250194859s" podCreationTimestamp="2025-09-30 10:04:33 +0000 UTC" firstStartedPulling="2025-09-30 10:04:35.471683361 +0000 UTC m=+919.804943354" lastFinishedPulling="2025-09-30 10:04:47.928574582 +0000 UTC m=+932.261834575" observedRunningTime="2025-09-30 10:04:50.237835804 +0000 UTC m=+934.571095797" watchObservedRunningTime="2025-09-30 10:04:50.250194859 +0000 UTC m=+934.583454862" Sep 30 10:04:50 crc kubenswrapper[4730]: I0930 10:04:50.337600 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-b5jlg" podStartSLOduration=5.420742864 podStartE2EDuration="17.33757917s" podCreationTimestamp="2025-09-30 10:04:33 +0000 UTC" firstStartedPulling="2025-09-30 10:04:36.019789443 +0000 UTC m=+920.353049436" lastFinishedPulling="2025-09-30 10:04:47.936625749 +0000 UTC m=+932.269885742" observedRunningTime="2025-09-30 10:04:50.336206443 +0000 UTC m=+934.669466446" watchObservedRunningTime="2025-09-30 10:04:50.33757917 +0000 UTC m=+934.670839173" Sep 30 10:04:50 crc kubenswrapper[4730]: I0930 10:04:50.418035 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-qx68r" podStartSLOduration=4.529653394 podStartE2EDuration="17.418009463s" podCreationTimestamp="2025-09-30 10:04:33 +0000 UTC" firstStartedPulling="2025-09-30 10:04:35.041814386 +0000 UTC m=+919.375074379" lastFinishedPulling="2025-09-30 10:04:47.930170455 +0000 UTC m=+932.263430448" observedRunningTime="2025-09-30 10:04:50.416020438 +0000 UTC m=+934.749280431" watchObservedRunningTime="2025-09-30 10:04:50.418009463 +0000 UTC m=+934.751269456" Sep 30 10:04:51 crc kubenswrapper[4730]: I0930 10:04:51.062074 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-f66b554c6-djzc5" event={"ID":"f3fcce5a-2080-44f6-971c-d1bda3dd0fe0","Type":"ContainerStarted","Data":"eac5e96570cebf6bed530df1cbadb7400c50fa4582904f79b7001d0b3311623f"} Sep 30 10:04:53 crc kubenswrapper[4730]: I0930 10:04:53.078063 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-nwvfh" event={"ID":"e6074e1b-4192-43a7-b391-f4112d2486bf","Type":"ContainerStarted","Data":"545a8441c8a655c21ec4a070d30734f1a60aeefb38835e760563e89735910a20"} Sep 30 10:04:53 crc kubenswrapper[4730]: I0930 10:04:53.080575 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-88c7-pr4r7" 
event={"ID":"81c8b722-d28f-42d4-8bc0-b82b9eb34500","Type":"ContainerStarted","Data":"ed58b747078faf7667462ad63cc99f99063876b081f28368b9d5d7111e11e802"} Sep 30 10:04:53 crc kubenswrapper[4730]: I0930 10:04:53.081360 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-88c7-pr4r7" Sep 30 10:04:53 crc kubenswrapper[4730]: I0930 10:04:53.083584 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-64d7b59854-6v77r" event={"ID":"8c6c5d61-fd4e-4b83-9b60-7681c6fc19f3","Type":"ContainerStarted","Data":"b449571a242aceff9abba0ab00c03835ffb4241cd40ab4fc55e8dc8276b214de"} Sep 30 10:04:53 crc kubenswrapper[4730]: I0930 10:04:53.083858 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-controller-manager-88c7-pr4r7" Sep 30 10:04:53 crc kubenswrapper[4730]: I0930 10:04:53.085260 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-tglx9" event={"ID":"2758692b-990d-4330-9765-22614cd379a0","Type":"ContainerStarted","Data":"3b721714aa19b19e5848c6c076ba633cf08ba0022b94471085294844cc3e0ca3"} Sep 30 10:04:53 crc kubenswrapper[4730]: I0930 10:04:53.087252 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-7d857cc749-rxq94" event={"ID":"4201b1a7-e458-49b2-9536-91e6db49ea36","Type":"ContainerStarted","Data":"2bf94ed43f315403c821def837f466398c8ce7af015b06a529f3be35441f4bda"} Sep 30 10:04:53 crc kubenswrapper[4730]: I0930 10:04:53.087459 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/test-operator-controller-manager-f66b554c6-djzc5" Sep 30 10:04:53 crc kubenswrapper[4730]: I0930 10:04:53.091182 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/test-operator-controller-manager-f66b554c6-djzc5" Sep 30 10:04:53 crc kubenswrapper[4730]: I0930 10:04:53.100292 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-controller-manager-88c7-pr4r7" podStartSLOduration=7.626509467 podStartE2EDuration="20.100276076s" podCreationTimestamp="2025-09-30 10:04:33 +0000 UTC" firstStartedPulling="2025-09-30 10:04:35.457375233 +0000 UTC m=+919.790635226" lastFinishedPulling="2025-09-30 10:04:47.931141842 +0000 UTC m=+932.264401835" observedRunningTime="2025-09-30 10:04:53.099061263 +0000 UTC m=+937.432321256" watchObservedRunningTime="2025-09-30 10:04:53.100276076 +0000 UTC m=+937.433536069" Sep 30 10:04:53 crc kubenswrapper[4730]: I0930 10:04:53.141375 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/test-operator-controller-manager-f66b554c6-djzc5" podStartSLOduration=7.870572361 podStartE2EDuration="20.141354361s" podCreationTimestamp="2025-09-30 10:04:33 +0000 UTC" firstStartedPulling="2025-09-30 10:04:35.642151277 +0000 UTC m=+919.975411270" lastFinishedPulling="2025-09-30 10:04:47.912933277 +0000 UTC m=+932.246193270" observedRunningTime="2025-09-30 10:04:53.135550714 +0000 UTC m=+937.468810707" watchObservedRunningTime="2025-09-30 10:04:53.141354361 +0000 UTC m=+937.474614364" Sep 30 10:04:53 crc kubenswrapper[4730]: I0930 10:04:53.529315 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-nk8jc" 
Sep 30 10:04:53 crc kubenswrapper[4730]: I0930 10:04:53.547850 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-qx68r"
Sep 30 10:04:53 crc kubenswrapper[4730]: I0930 10:04:53.586282 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/glance-operator-controller-manager-84958c4d49-jdfxz"
Sep 30 10:04:53 crc kubenswrapper[4730]: I0930 10:04:53.615198 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-p957g"
Sep 30 10:04:53 crc kubenswrapper[4730]: I0930 10:04:53.769957 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ironic-operator-controller-manager-7975b88857-72mvr"
Sep 30 10:04:53 crc kubenswrapper[4730]: I0930 10:04:53.891654 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-bgczv"
Sep 30 10:04:53 crc kubenswrapper[4730]: I0930 10:04:53.935850 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/nova-operator-controller-manager-c7c776c96-z5tb9"
Sep 30 10:04:54 crc kubenswrapper[4730]: I0930 10:04:54.103979 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-nwvfh"
Sep 30 10:04:54 crc kubenswrapper[4730]: I0930 10:04:54.104045 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-tglx9"
Sep 30 10:04:54 crc kubenswrapper[4730]: I0930 10:04:54.105851 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-tglx9"
Sep 30 10:04:54 crc kubenswrapper[4730]: I0930 10:04:54.110219 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-nwvfh"
Sep 30 10:04:54 crc kubenswrapper[4730]: I0930 10:04:54.134377 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-tglx9" podStartSLOduration=8.255816304 podStartE2EDuration="21.134352016s" podCreationTimestamp="2025-09-30 10:04:33 +0000 UTC" firstStartedPulling="2025-09-30 10:04:35.05006093 +0000 UTC m=+919.383320923" lastFinishedPulling="2025-09-30 10:04:47.928596642 +0000 UTC m=+932.261856635" observedRunningTime="2025-09-30 10:04:54.12122893 +0000 UTC m=+938.454488933" watchObservedRunningTime="2025-09-30 10:04:54.134352016 +0000 UTC m=+938.467612019"
Sep 30 10:04:54 crc kubenswrapper[4730]: I0930 10:04:54.153099 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-controller-manager-7d857cc749-rxq94" podStartSLOduration=8.642021254 podStartE2EDuration="21.153074524s" podCreationTimestamp="2025-09-30 10:04:33 +0000 UTC" firstStartedPulling="2025-09-30 10:04:35.462996765 +0000 UTC m=+919.796256748" lastFinishedPulling="2025-09-30 10:04:47.974050025 +0000 UTC m=+932.307310018" observedRunningTime="2025-09-30 10:04:54.140288077 +0000 UTC m=+938.473548070" watchObservedRunningTime="2025-09-30 10:04:54.153074524 +0000 UTC m=+938.486334517"
Sep 30 10:04:54 crc kubenswrapper[4730]: I0930 10:04:54.211666 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-nwvfh" podStartSLOduration=8.649493445 podStartE2EDuration="21.211649253s" podCreationTimestamp="2025-09-30 10:04:33 +0000 UTC" firstStartedPulling="2025-09-30 10:04:35.367601626 +0000 UTC m=+919.700861619" lastFinishedPulling="2025-09-30 10:04:47.929757434 +0000 UTC m=+932.263017427" observedRunningTime="2025-09-30 10:04:54.209410503 +0000 UTC m=+938.542670496" watchObservedRunningTime="2025-09-30 10:04:54.211649253 +0000 UTC m=+938.544909246"
Sep 30 10:04:54 crc kubenswrapper[4730]: I0930 10:04:54.248261 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/neutron-operator-controller-manager-64d7b59854-6v77r" podStartSLOduration=8.715037055 podStartE2EDuration="21.248237266s" podCreationTimestamp="2025-09-30 10:04:33 +0000 UTC" firstStartedPulling="2025-09-30 10:04:35.396033758 +0000 UTC m=+919.729293751" lastFinishedPulling="2025-09-30 10:04:47.929233979 +0000 UTC m=+932.262493962" observedRunningTime="2025-09-30 10:04:54.228952912 +0000 UTC m=+938.562212905" watchObservedRunningTime="2025-09-30 10:04:54.248237266 +0000 UTC m=+938.581497259"
Sep 30 10:04:54 crc kubenswrapper[4730]: I0930 10:04:54.261212 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-7d857cc749-rxq94"
Sep 30 10:04:54 crc kubenswrapper[4730]: I0930 10:04:54.270726 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-7d857cc749-rxq94"
Sep 30 10:04:55 crc kubenswrapper[4730]: I0930 10:04:55.609155 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-b5jlg"
Sep 30 10:05:02 crc kubenswrapper[4730]: I0930 10:05:02.205627 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-mwgsf" event={"ID":"db4dd5c5-bcc5-4782-acf0-42d686edd287","Type":"ContainerStarted","Data":"69dff43dba6e4e9563cc15e724246a75d2e50c4d2dd524ad1f0c504f756eb26e"}
Sep 30 10:05:02 crc kubenswrapper[4730]: I0930 10:05:02.206709 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-mwgsf"
Sep 30 10:05:02 crc kubenswrapper[4730]: I0930 10:05:02.208029 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-6c4b8dd4dc-tbxsc" event={"ID":"ba719558-c698-41e9-8b5e-a3449a6f9a7c","Type":"ContainerStarted","Data":"216a2892cf76f608f467b524088a846256a19624c800db11307ddca48de50c87"}
Sep 30 10:05:02 crc kubenswrapper[4730]: I0930 10:05:02.208516 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/watcher-operator-controller-manager-6c4b8dd4dc-tbxsc"
Sep 30 10:05:02 crc kubenswrapper[4730]: I0930 10:05:02.209689 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-tsgfd" event={"ID":"f4fe55ab-8eac-4f9a-9f5f-c70a91fd261e","Type":"ContainerStarted","Data":"f6750bfbd6751bf02918e4925ed018c4a3e86b090b99a3783686491e3ea729b4"}
Sep 30 10:05:02 crc kubenswrapper[4730]: I0930 10:05:02.210021 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-tsgfd"
Sep 30 10:05:02 crc kubenswrapper[4730]: I0930 10:05:02.211516 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-589c58c6c-zbmbb" event={"ID":"28fcf9ea-7f63-4add-bb31-99af57fcce2c","Type":"ContainerStarted","Data":"f2f6ec7f07bbd0486645f59f67e7d586bf43ac4b9f20e961beff4f39b21c252e"}
Sep 30 10:05:02 crc kubenswrapper[4730]: I0930 10:05:02.211705 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/placement-operator-controller-manager-589c58c6c-zbmbb"
Sep 30 10:05:02 crc kubenswrapper[4730]: I0930 10:05:02.213031 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-qsgdr" event={"ID":"c24a4e1a-10db-44f2-9de6-16f4081a5609","Type":"ContainerStarted","Data":"b70c8282edaee8d400c04667b987695dbddd7e62e00dd140ff6969bc2c4d7ec2"}
Sep 30 10:05:02 crc kubenswrapper[4730]: I0930 10:05:02.213179 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-qsgdr"
Sep 30 10:05:02 crc kubenswrapper[4730]: I0930 10:05:02.214367 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-d4vnn" event={"ID":"f26267a9-08cf-4ff8-8fab-d1bfe01dbd65","Type":"ContainerStarted","Data":"7e191d493ab7f4a17c0dd993459b2802ca38dc52890f04a74a72f3d8bcb8d436"}
Sep 30 10:05:02 crc kubenswrapper[4730]: I0930 10:05:02.215778 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-dsvl6" event={"ID":"a081f8cc-4fb8-457c-84de-2c7ba2c84821","Type":"ContainerStarted","Data":"fd0c95a9c3bd025b5a4c845d7ba23b66a88501dfa932d464fd09d1acce3c81a2"}
Sep 30 10:05:02 crc kubenswrapper[4730]: I0930 10:05:02.216143 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-dsvl6"
Sep 30 10:05:02 crc kubenswrapper[4730]: I0930 10:05:02.217365 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-g7zc5" event={"ID":"0f10a085-8ce3-407b-a2ec-b6fabc38bc9f","Type":"ContainerStarted","Data":"a3c9012b26c15deb3056c389c8adc245a824c2a2bba4d71122fde5453bb0067f"}
Sep 30 10:05:02 crc kubenswrapper[4730]: I0930 10:05:02.217770 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-g7zc5"
Sep 30 10:05:02 crc kubenswrapper[4730]: I0930 10:05:02.228654 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-mwgsf" podStartSLOduration=3.614955614 podStartE2EDuration="29.228639577s" podCreationTimestamp="2025-09-30 10:04:33 +0000 UTC" firstStartedPulling="2025-09-30 10:04:35.530654211 +0000 UTC m=+919.863914214" lastFinishedPulling="2025-09-30 10:05:01.144338184 +0000 UTC m=+945.477598177" observedRunningTime="2025-09-30 10:05:02.227844295 +0000 UTC m=+946.561104298" watchObservedRunningTime="2025-09-30 10:05:02.228639577 +0000 UTC m=+946.561899570"
Sep 30 10:05:02 crc kubenswrapper[4730]: I0930 10:05:02.323975 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-qsgdr" podStartSLOduration=3.573652963 podStartE2EDuration="29.323955603s" podCreationTimestamp="2025-09-30 10:04:33 +0000 UTC" firstStartedPulling="2025-09-30 10:04:35.530394444 +0000 UTC m=+919.863654447" lastFinishedPulling="2025-09-30 10:05:01.280697094 +0000 UTC m=+945.613957087" observedRunningTime="2025-09-30 10:05:02.306656164 +0000 UTC m=+946.639916157" watchObservedRunningTime="2025-09-30 10:05:02.323955603 +0000 UTC m=+946.657215596"
Sep 30 10:05:02 crc kubenswrapper[4730]: I0930 10:05:02.326961 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-g7zc5" podStartSLOduration=3.657444275 podStartE2EDuration="29.326950714s" podCreationTimestamp="2025-09-30 10:04:33 +0000 UTC" firstStartedPulling="2025-09-30 10:04:35.47390227 +0000 UTC m=+919.807162263" lastFinishedPulling="2025-09-30 10:05:01.143408709 +0000 UTC m=+945.476668702" observedRunningTime="2025-09-30 10:05:02.320673784 +0000 UTC m=+946.653933787" watchObservedRunningTime="2025-09-30 10:05:02.326950714 +0000 UTC m=+946.660210707"
Sep 30 10:05:02 crc kubenswrapper[4730]: I0930 10:05:02.336278 4730 patch_prober.go:28] interesting pod/machine-config-daemon-d4zf9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 30 10:05:02 crc kubenswrapper[4730]: I0930 10:05:02.336323 4730 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 30 10:05:02 crc kubenswrapper[4730]: I0930 10:05:02.336360 4730 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9"
Sep 30 10:05:02 crc kubenswrapper[4730]: I0930 10:05:02.336984 4730 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"900986cbdecf38d2005d5e11f37ce0d1a6c8ab5af66f64b87d1373420d3568ee"} pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Sep 30 10:05:02 crc kubenswrapper[4730]: I0930 10:05:02.337039 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerName="machine-config-daemon" containerID="cri-o://900986cbdecf38d2005d5e11f37ce0d1a6c8ab5af66f64b87d1373420d3568ee" gracePeriod=600
Sep 30 10:05:02 crc kubenswrapper[4730]: I0930 10:05:02.337243 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/watcher-operator-controller-manager-6c4b8dd4dc-tbxsc" podStartSLOduration=6.494305767 podStartE2EDuration="29.337220903s" podCreationTimestamp="2025-09-30 10:04:33 +0000 UTC" firstStartedPulling="2025-09-30 10:04:35.610134778 +0000 UTC m=+919.943394771" lastFinishedPulling="2025-09-30 10:04:58.453049924 +0000 UTC m=+942.786309907" observedRunningTime="2025-09-30 10:05:02.335843776 +0000 UTC m=+946.669103769" watchObservedRunningTime="2025-09-30 10:05:02.337220903 +0000 UTC m=+946.670480896"
Sep 30 10:05:02 crc kubenswrapper[4730]: I0930 10:05:02.377979 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-d4vnn" podStartSLOduration=2.59236138 podStartE2EDuration="28.377965449s" podCreationTimestamp="2025-09-30 10:04:34 +0000 UTC" firstStartedPulling="2025-09-30 10:04:35.52252715 +0000 UTC m=+919.855787153" lastFinishedPulling="2025-09-30 10:05:01.308131229 +0000 UTC m=+945.641391222" observedRunningTime="2025-09-30 10:05:02.375101411 +0000 UTC m=+946.708361394" watchObservedRunningTime="2025-09-30 10:05:02.377965449 +0000 UTC m=+946.711225442"
Sep 30 10:05:02 crc kubenswrapper[4730]: I0930 10:05:02.400442 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/placement-operator-controller-manager-589c58c6c-zbmbb" podStartSLOduration=3.7392028059999998 podStartE2EDuration="29.400422328s" podCreationTimestamp="2025-09-30 10:04:33 +0000 UTC" firstStartedPulling="2025-09-30 10:04:35.480199802 +0000 UTC m=+919.813459795" lastFinishedPulling="2025-09-30 10:05:01.141419324 +0000 UTC m=+945.474679317" observedRunningTime="2025-09-30 10:05:02.398348211 +0000 UTC m=+946.731608204" watchObservedRunningTime="2025-09-30 10:05:02.400422328 +0000 UTC m=+946.733682321"
Sep 30 10:05:02 crc kubenswrapper[4730]: I0930 10:05:02.423399 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-tsgfd" podStartSLOduration=3.857816334 podStartE2EDuration="29.423379451s" podCreationTimestamp="2025-09-30 10:04:33 +0000 UTC" firstStartedPulling="2025-09-30 10:04:35.557517009 +0000 UTC m=+919.890777002" lastFinishedPulling="2025-09-30 10:05:01.123080136 +0000 UTC m=+945.456340119" observedRunningTime="2025-09-30 10:05:02.421133569 +0000 UTC m=+946.754393562" watchObservedRunningTime="2025-09-30 10:05:02.423379451 +0000 UTC m=+946.756639444"
Sep 30 10:05:02 crc kubenswrapper[4730]: I0930 10:05:02.445184 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-dsvl6" podStartSLOduration=3.932158902 podStartE2EDuration="29.445160992s" podCreationTimestamp="2025-09-30 10:04:33 +0000 UTC" firstStartedPulling="2025-09-30 10:04:35.610072626 +0000 UTC m=+919.943332619" lastFinishedPulling="2025-09-30 10:05:01.123074716 +0000 UTC m=+945.456334709" observedRunningTime="2025-09-30 10:05:02.437564526 +0000 UTC m=+946.770824519" watchObservedRunningTime="2025-09-30 10:05:02.445160992 +0000 UTC m=+946.778420985"
Sep 30 10:05:03 crc kubenswrapper[4730]: I0930 10:05:03.227040 4730 generic.go:334] "Generic (PLEG): container finished" podID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerID="900986cbdecf38d2005d5e11f37ce0d1a6c8ab5af66f64b87d1373420d3568ee" exitCode=0
Sep 30 10:05:03 crc kubenswrapper[4730]: I0930 10:05:03.227075 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" event={"ID":"95bd4436-8399-478d-9552-c9ba5ae8f327","Type":"ContainerDied","Data":"900986cbdecf38d2005d5e11f37ce0d1a6c8ab5af66f64b87d1373420d3568ee"}
Sep 30 10:05:03 crc kubenswrapper[4730]: I0930 10:05:03.227601 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" event={"ID":"95bd4436-8399-478d-9552-c9ba5ae8f327","Type":"ContainerStarted","Data":"3472c6d9d1cf6cc70effd10d384c0280a404f7b8fcfc840434d206e9db23adc4"}
Sep 30 10:05:03 crc kubenswrapper[4730]: I0930 10:05:03.227644 4730 scope.go:117] "RemoveContainer" containerID="e4ef153cbbd5d6d6260e417ec2d4e0d4bbc0012c9d4b4d0945d491a415dda27d"
Sep 30 10:05:03 crc kubenswrapper[4730]: I0930 10:05:03.912025 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/neutron-operator-controller-manager-64d7b59854-6v77r"
Sep 30 10:05:03 crc kubenswrapper[4730]: I0930 10:05:03.915573 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/neutron-operator-controller-manager-64d7b59854-6v77r"
Sep 30 10:05:13 crc kubenswrapper[4730]: I0930 10:05:13.683629 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-mwgsf"
Sep 30 10:05:13 crc kubenswrapper[4730]: I0930 10:05:13.772690 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-qsgdr"
Sep 30 10:05:14 crc kubenswrapper[4730]: I0930 10:05:14.030674 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-g7zc5"
Sep 30 10:05:14 crc kubenswrapper[4730]: I0930 10:05:14.134241 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/placement-operator-controller-manager-589c58c6c-zbmbb"
Sep 30 10:05:14 crc kubenswrapper[4730]: I0930 10:05:14.187338 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-tsgfd"
Sep 30 10:05:14 crc kubenswrapper[4730]: I0930 10:05:14.209367 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-dsvl6"
Sep 30 10:05:14 crc kubenswrapper[4730]: I0930 10:05:14.606199 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/watcher-operator-controller-manager-6c4b8dd4dc-tbxsc"
Sep 30 10:05:33 crc kubenswrapper[4730]: I0930 10:05:33.119622 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6c58b4c7b9-pcd2w"]
Sep 30 10:05:33 crc kubenswrapper[4730]: I0930 10:05:33.122429 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6c58b4c7b9-pcd2w"
Sep 30 10:05:33 crc kubenswrapper[4730]: I0930 10:05:33.130510 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns"
Sep 30 10:05:33 crc kubenswrapper[4730]: I0930 10:05:33.130793 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt"
Sep 30 10:05:33 crc kubenswrapper[4730]: I0930 10:05:33.130940 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt"
Sep 30 10:05:33 crc kubenswrapper[4730]: I0930 10:05:33.131357 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-xqjnh"
Sep 30 10:05:33 crc kubenswrapper[4730]: I0930 10:05:33.136762 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6c58b4c7b9-pcd2w"]
Sep 30 10:05:33 crc kubenswrapper[4730]: I0930 10:05:33.180136 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gnhsr\" (UniqueName: \"kubernetes.io/projected/1536e158-ee29-48ad-88d6-9e6d148085e7-kube-api-access-gnhsr\") pod \"dnsmasq-dns-6c58b4c7b9-pcd2w\" (UID: \"1536e158-ee29-48ad-88d6-9e6d148085e7\") " pod="openstack/dnsmasq-dns-6c58b4c7b9-pcd2w"
Sep 30 10:05:33 crc kubenswrapper[4730]: I0930 10:05:33.180244 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1536e158-ee29-48ad-88d6-9e6d148085e7-config\") pod \"dnsmasq-dns-6c58b4c7b9-pcd2w\" (UID: \"1536e158-ee29-48ad-88d6-9e6d148085e7\") " pod="openstack/dnsmasq-dns-6c58b4c7b9-pcd2w"
Sep 30 10:05:33 crc kubenswrapper[4730]: I0930 10:05:33.217181 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7dff86fb65-5r2md"]
Sep 30 10:05:33 crc kubenswrapper[4730]: I0930 10:05:33.219370 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7dff86fb65-5r2md"
Sep 30 10:05:33 crc kubenswrapper[4730]: I0930 10:05:33.223528 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc"
Sep 30 10:05:33 crc kubenswrapper[4730]: I0930 10:05:33.233846 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7dff86fb65-5r2md"]
Sep 30 10:05:33 crc kubenswrapper[4730]: I0930 10:05:33.281482 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gnhsr\" (UniqueName: \"kubernetes.io/projected/1536e158-ee29-48ad-88d6-9e6d148085e7-kube-api-access-gnhsr\") pod \"dnsmasq-dns-6c58b4c7b9-pcd2w\" (UID: \"1536e158-ee29-48ad-88d6-9e6d148085e7\") " pod="openstack/dnsmasq-dns-6c58b4c7b9-pcd2w"
Sep 30 10:05:33 crc kubenswrapper[4730]: I0930 10:05:33.281569 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zfxnn\" (UniqueName: \"kubernetes.io/projected/5c8b83f7-40a2-4215-b77d-828bc2ac8a44-kube-api-access-zfxnn\") pod \"dnsmasq-dns-7dff86fb65-5r2md\" (UID: \"5c8b83f7-40a2-4215-b77d-828bc2ac8a44\") " pod="openstack/dnsmasq-dns-7dff86fb65-5r2md"
Sep 30 10:05:33 crc kubenswrapper[4730]: I0930 10:05:33.281607 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1536e158-ee29-48ad-88d6-9e6d148085e7-config\") pod \"dnsmasq-dns-6c58b4c7b9-pcd2w\" (UID: \"1536e158-ee29-48ad-88d6-9e6d148085e7\") " pod="openstack/dnsmasq-dns-6c58b4c7b9-pcd2w"
Sep 30 10:05:33 crc kubenswrapper[4730]: I0930 10:05:33.281670 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5c8b83f7-40a2-4215-b77d-828bc2ac8a44-dns-svc\") pod \"dnsmasq-dns-7dff86fb65-5r2md\" (UID: \"5c8b83f7-40a2-4215-b77d-828bc2ac8a44\") " pod="openstack/dnsmasq-dns-7dff86fb65-5r2md"
Sep 30 10:05:33 crc kubenswrapper[4730]: I0930 10:05:33.281711 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5c8b83f7-40a2-4215-b77d-828bc2ac8a44-config\") pod \"dnsmasq-dns-7dff86fb65-5r2md\" (UID: \"5c8b83f7-40a2-4215-b77d-828bc2ac8a44\") " pod="openstack/dnsmasq-dns-7dff86fb65-5r2md"
Sep 30 10:05:33 crc kubenswrapper[4730]: I0930 10:05:33.282637 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1536e158-ee29-48ad-88d6-9e6d148085e7-config\") pod \"dnsmasq-dns-6c58b4c7b9-pcd2w\" (UID: \"1536e158-ee29-48ad-88d6-9e6d148085e7\") " pod="openstack/dnsmasq-dns-6c58b4c7b9-pcd2w"
Sep 30 10:05:33 crc kubenswrapper[4730]: I0930 10:05:33.303021 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gnhsr\" (UniqueName: \"kubernetes.io/projected/1536e158-ee29-48ad-88d6-9e6d148085e7-kube-api-access-gnhsr\") pod \"dnsmasq-dns-6c58b4c7b9-pcd2w\" (UID: \"1536e158-ee29-48ad-88d6-9e6d148085e7\") " pod="openstack/dnsmasq-dns-6c58b4c7b9-pcd2w"
Sep 30 10:05:33 crc kubenswrapper[4730]: I0930 10:05:33.383179 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5c8b83f7-40a2-4215-b77d-828bc2ac8a44-dns-svc\") pod \"dnsmasq-dns-7dff86fb65-5r2md\" (UID: \"5c8b83f7-40a2-4215-b77d-828bc2ac8a44\") " pod="openstack/dnsmasq-dns-7dff86fb65-5r2md"
Sep 30 10:05:33 crc kubenswrapper[4730]: I0930 10:05:33.383236 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5c8b83f7-40a2-4215-b77d-828bc2ac8a44-config\") pod \"dnsmasq-dns-7dff86fb65-5r2md\" (UID: \"5c8b83f7-40a2-4215-b77d-828bc2ac8a44\") " pod="openstack/dnsmasq-dns-7dff86fb65-5r2md"
Sep 30 10:05:33 crc kubenswrapper[4730]: I0930 10:05:33.383313 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zfxnn\" (UniqueName: \"kubernetes.io/projected/5c8b83f7-40a2-4215-b77d-828bc2ac8a44-kube-api-access-zfxnn\") pod \"dnsmasq-dns-7dff86fb65-5r2md\" (UID: \"5c8b83f7-40a2-4215-b77d-828bc2ac8a44\") " pod="openstack/dnsmasq-dns-7dff86fb65-5r2md"
Sep 30 10:05:33 crc kubenswrapper[4730]: I0930 10:05:33.384208 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5c8b83f7-40a2-4215-b77d-828bc2ac8a44-dns-svc\") pod \"dnsmasq-dns-7dff86fb65-5r2md\" (UID: \"5c8b83f7-40a2-4215-b77d-828bc2ac8a44\") " pod="openstack/dnsmasq-dns-7dff86fb65-5r2md"
Sep 30 10:05:33 crc kubenswrapper[4730]: I0930 10:05:33.384247 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5c8b83f7-40a2-4215-b77d-828bc2ac8a44-config\") pod \"dnsmasq-dns-7dff86fb65-5r2md\" (UID: \"5c8b83f7-40a2-4215-b77d-828bc2ac8a44\") " pod="openstack/dnsmasq-dns-7dff86fb65-5r2md"
Sep 30 10:05:33 crc kubenswrapper[4730]: I0930 10:05:33.400270 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zfxnn\" (UniqueName: \"kubernetes.io/projected/5c8b83f7-40a2-4215-b77d-828bc2ac8a44-kube-api-access-zfxnn\") pod \"dnsmasq-dns-7dff86fb65-5r2md\" (UID: \"5c8b83f7-40a2-4215-b77d-828bc2ac8a44\") " pod="openstack/dnsmasq-dns-7dff86fb65-5r2md"
Sep 30 10:05:33 crc kubenswrapper[4730]: I0930 10:05:33.465315 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6c58b4c7b9-pcd2w"
Sep 30 10:05:33 crc kubenswrapper[4730]: I0930 10:05:33.542821 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7dff86fb65-5r2md"
Sep 30 10:05:33 crc kubenswrapper[4730]: I0930 10:05:33.926960 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6c58b4c7b9-pcd2w"]
Sep 30 10:05:34 crc kubenswrapper[4730]: I0930 10:05:34.003215 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7dff86fb65-5r2md"]
Sep 30 10:05:34 crc kubenswrapper[4730]: W0930 10:05:34.007990 4730 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5c8b83f7_40a2_4215_b77d_828bc2ac8a44.slice/crio-7ab03fe9fad9717129531a7f7644c337161073b155cac983cd864a6d6daa9c72 WatchSource:0}: Error finding container 7ab03fe9fad9717129531a7f7644c337161073b155cac983cd864a6d6daa9c72: Status 404 returned error can't find the container with id 7ab03fe9fad9717129531a7f7644c337161073b155cac983cd864a6d6daa9c72
Sep 30 10:05:34 crc kubenswrapper[4730]: I0930 10:05:34.461054 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6c58b4c7b9-pcd2w" event={"ID":"1536e158-ee29-48ad-88d6-9e6d148085e7","Type":"ContainerStarted","Data":"b9467c5c6abc803b051d6d3f490a6f0693ab27679151f7f8300f92614efd2c3f"}
Sep 30 10:05:34 crc kubenswrapper[4730]: I0930 10:05:34.462198 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7dff86fb65-5r2md" event={"ID":"5c8b83f7-40a2-4215-b77d-828bc2ac8a44","Type":"ContainerStarted","Data":"7ab03fe9fad9717129531a7f7644c337161073b155cac983cd864a6d6daa9c72"}
Sep 30 10:05:37 crc kubenswrapper[4730]: I0930 10:05:37.247583 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6c58b4c7b9-pcd2w"]
Sep 30 10:05:37 crc kubenswrapper[4730]: I0930 10:05:37.278411 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-665d695fd9-t78qs"]
Sep 30 10:05:37 crc kubenswrapper[4730]: I0930 10:05:37.280264 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-665d695fd9-t78qs"
Sep 30 10:05:37 crc kubenswrapper[4730]: I0930 10:05:37.293899 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-665d695fd9-t78qs"]
Sep 30 10:05:37 crc kubenswrapper[4730]: I0930 10:05:37.336829 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/08995951-fead-4afd-a764-3a6963d8c7df-dns-svc\") pod \"dnsmasq-dns-665d695fd9-t78qs\" (UID: \"08995951-fead-4afd-a764-3a6963d8c7df\") " pod="openstack/dnsmasq-dns-665d695fd9-t78qs"
Sep 30 10:05:37 crc kubenswrapper[4730]: I0930 10:05:37.336915 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/08995951-fead-4afd-a764-3a6963d8c7df-config\") pod \"dnsmasq-dns-665d695fd9-t78qs\" (UID: \"08995951-fead-4afd-a764-3a6963d8c7df\") " pod="openstack/dnsmasq-dns-665d695fd9-t78qs"
Sep 30 10:05:37 crc kubenswrapper[4730]: I0930 10:05:37.336942 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6rz6z\" (UniqueName: \"kubernetes.io/projected/08995951-fead-4afd-a764-3a6963d8c7df-kube-api-access-6rz6z\") pod \"dnsmasq-dns-665d695fd9-t78qs\" (UID: \"08995951-fead-4afd-a764-3a6963d8c7df\") " pod="openstack/dnsmasq-dns-665d695fd9-t78qs"
Sep 30 10:05:37 crc kubenswrapper[4730]: I0930 10:05:37.439996 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/08995951-fead-4afd-a764-3a6963d8c7df-config\") pod \"dnsmasq-dns-665d695fd9-t78qs\" (UID: \"08995951-fead-4afd-a764-3a6963d8c7df\") " pod="openstack/dnsmasq-dns-665d695fd9-t78qs"
Sep 30 10:05:37 crc kubenswrapper[4730]: I0930 10:05:37.440063 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/08995951-fead-4afd-a764-3a6963d8c7df-config\") pod \"dnsmasq-dns-665d695fd9-t78qs\" (UID: \"08995951-fead-4afd-a764-3a6963d8c7df\") " pod="openstack/dnsmasq-dns-665d695fd9-t78qs"
Sep 30 10:05:37 crc kubenswrapper[4730]: I0930 10:05:37.440109 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6rz6z\" (UniqueName: \"kubernetes.io/projected/08995951-fead-4afd-a764-3a6963d8c7df-kube-api-access-6rz6z\") pod \"dnsmasq-dns-665d695fd9-t78qs\" (UID: \"08995951-fead-4afd-a764-3a6963d8c7df\") " pod="openstack/dnsmasq-dns-665d695fd9-t78qs"
Sep 30 10:05:37 crc kubenswrapper[4730]: I0930 10:05:37.440724 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/08995951-fead-4afd-a764-3a6963d8c7df-dns-svc\") pod \"dnsmasq-dns-665d695fd9-t78qs\" (UID: \"08995951-fead-4afd-a764-3a6963d8c7df\") " pod="openstack/dnsmasq-dns-665d695fd9-t78qs"
Sep 30 10:05:37 crc kubenswrapper[4730]: I0930 10:05:37.441455 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/08995951-fead-4afd-a764-3a6963d8c7df-dns-svc\") pod \"dnsmasq-dns-665d695fd9-t78qs\" (UID: \"08995951-fead-4afd-a764-3a6963d8c7df\") " pod="openstack/dnsmasq-dns-665d695fd9-t78qs"
Sep 30 10:05:37 crc kubenswrapper[4730]: I0930 10:05:37.463536 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6rz6z\" (UniqueName: \"kubernetes.io/projected/08995951-fead-4afd-a764-3a6963d8c7df-kube-api-access-6rz6z\") pod \"dnsmasq-dns-665d695fd9-t78qs\" (UID: \"08995951-fead-4afd-a764-3a6963d8c7df\") " pod="openstack/dnsmasq-dns-665d695fd9-t78qs"
Sep 30 10:05:37 crc kubenswrapper[4730]: I0930 10:05:37.579065 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7dff86fb65-5r2md"]
Sep 30 10:05:37 crc kubenswrapper[4730]: I0930 10:05:37.615561 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-db978b4d7-2wdwq"]
Sep 30 10:05:37 crc kubenswrapper[4730]: I0930 10:05:37.616933 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-db978b4d7-2wdwq"
Sep 30 10:05:37 crc kubenswrapper[4730]: I0930 10:05:37.624800 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-665d695fd9-t78qs"
Sep 30 10:05:37 crc kubenswrapper[4730]: I0930 10:05:37.626546 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-db978b4d7-2wdwq"]
Sep 30 10:05:37 crc kubenswrapper[4730]: I0930 10:05:37.745273 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5v47r\" (UniqueName: \"kubernetes.io/projected/7cec0bcd-db20-40ea-b4d6-f1e1578311b5-kube-api-access-5v47r\") pod \"dnsmasq-dns-db978b4d7-2wdwq\" (UID: \"7cec0bcd-db20-40ea-b4d6-f1e1578311b5\") " pod="openstack/dnsmasq-dns-db978b4d7-2wdwq"
Sep 30 10:05:37 crc kubenswrapper[4730]: I0930 10:05:37.747889 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7cec0bcd-db20-40ea-b4d6-f1e1578311b5-config\") pod \"dnsmasq-dns-db978b4d7-2wdwq\" (UID: \"7cec0bcd-db20-40ea-b4d6-f1e1578311b5\") " pod="openstack/dnsmasq-dns-db978b4d7-2wdwq"
Sep 30 10:05:37 crc kubenswrapper[4730]: I0930 10:05:37.748151 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7cec0bcd-db20-40ea-b4d6-f1e1578311b5-dns-svc\") pod \"dnsmasq-dns-db978b4d7-2wdwq\" (UID: \"7cec0bcd-db20-40ea-b4d6-f1e1578311b5\") " pod="openstack/dnsmasq-dns-db978b4d7-2wdwq"
Sep 30 10:05:37 crc kubenswrapper[4730]: I0930 10:05:37.849809 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7cec0bcd-db20-40ea-b4d6-f1e1578311b5-dns-svc\") pod \"dnsmasq-dns-db978b4d7-2wdwq\" (UID: \"7cec0bcd-db20-40ea-b4d6-f1e1578311b5\") " pod="openstack/dnsmasq-dns-db978b4d7-2wdwq"
Sep 30 10:05:37 crc kubenswrapper[4730]: I0930 10:05:37.849882 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5v47r\" (UniqueName: \"kubernetes.io/projected/7cec0bcd-db20-40ea-b4d6-f1e1578311b5-kube-api-access-5v47r\") pod \"dnsmasq-dns-db978b4d7-2wdwq\" (UID: \"7cec0bcd-db20-40ea-b4d6-f1e1578311b5\") " pod="openstack/dnsmasq-dns-db978b4d7-2wdwq"
Sep 30 10:05:37 crc kubenswrapper[4730]: I0930 10:05:37.849903 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7cec0bcd-db20-40ea-b4d6-f1e1578311b5-config\") pod \"dnsmasq-dns-db978b4d7-2wdwq\" (UID: \"7cec0bcd-db20-40ea-b4d6-f1e1578311b5\") " pod="openstack/dnsmasq-dns-db978b4d7-2wdwq"
Sep 30 10:05:37 crc kubenswrapper[4730]: I0930 10:05:37.850665 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7cec0bcd-db20-40ea-b4d6-f1e1578311b5-config\") pod \"dnsmasq-dns-db978b4d7-2wdwq\" (UID: \"7cec0bcd-db20-40ea-b4d6-f1e1578311b5\") " pod="openstack/dnsmasq-dns-db978b4d7-2wdwq"
Sep 30 10:05:37 crc kubenswrapper[4730]: I0930 10:05:37.851152 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7cec0bcd-db20-40ea-b4d6-f1e1578311b5-dns-svc\") pod \"dnsmasq-dns-db978b4d7-2wdwq\" (UID: \"7cec0bcd-db20-40ea-b4d6-f1e1578311b5\") " pod="openstack/dnsmasq-dns-db978b4d7-2wdwq"
Sep 30 10:05:37 crc kubenswrapper[4730]: I0930 10:05:37.855499 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-665d695fd9-t78qs"]
Sep 30 10:05:37 crc kubenswrapper[4730]: I0930 10:05:37.877844 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5v47r\" (UniqueName: \"kubernetes.io/projected/7cec0bcd-db20-40ea-b4d6-f1e1578311b5-kube-api-access-5v47r\") pod \"dnsmasq-dns-db978b4d7-2wdwq\" (UID: \"7cec0bcd-db20-40ea-b4d6-f1e1578311b5\") " pod="openstack/dnsmasq-dns-db978b4d7-2wdwq"
Sep 30 10:05:37 crc kubenswrapper[4730]: I0930 10:05:37.882497 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-654794fb59-cf4b6"]
Sep 30 10:05:37 crc kubenswrapper[4730]: I0930 10:05:37.883996 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-654794fb59-cf4b6"
Sep 30 10:05:37 crc kubenswrapper[4730]: I0930 10:05:37.892331 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-654794fb59-cf4b6"]
Sep 30 10:05:37 crc kubenswrapper[4730]: I0930 10:05:37.934432 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-db978b4d7-2wdwq"
Sep 30 10:05:37 crc kubenswrapper[4730]: I0930 10:05:37.951424 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d2hlz\" (UniqueName: \"kubernetes.io/projected/635201a1-9967-46a0-8561-14d9d70d0c6c-kube-api-access-d2hlz\") pod \"dnsmasq-dns-654794fb59-cf4b6\" (UID: \"635201a1-9967-46a0-8561-14d9d70d0c6c\") " pod="openstack/dnsmasq-dns-654794fb59-cf4b6"
Sep 30 10:05:37 crc kubenswrapper[4730]: I0930 10:05:37.951702 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/635201a1-9967-46a0-8561-14d9d70d0c6c-config\") pod \"dnsmasq-dns-654794fb59-cf4b6\" (UID: \"635201a1-9967-46a0-8561-14d9d70d0c6c\") " pod="openstack/dnsmasq-dns-654794fb59-cf4b6"
Sep 30 10:05:37 crc kubenswrapper[4730]: I0930 10:05:37.951824 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/635201a1-9967-46a0-8561-14d9d70d0c6c-dns-svc\") pod \"dnsmasq-dns-654794fb59-cf4b6\" (UID: \"635201a1-9967-46a0-8561-14d9d70d0c6c\") " pod="openstack/dnsmasq-dns-654794fb59-cf4b6"
Sep 30 10:05:38 crc kubenswrapper[4730]: I0930 10:05:38.053532 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/635201a1-9967-46a0-8561-14d9d70d0c6c-config\") pod \"dnsmasq-dns-654794fb59-cf4b6\" (UID: \"635201a1-9967-46a0-8561-14d9d70d0c6c\") " pod="openstack/dnsmasq-dns-654794fb59-cf4b6"
Sep 30 10:05:38 crc kubenswrapper[4730]: I0930 10:05:38.053624 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/635201a1-9967-46a0-8561-14d9d70d0c6c-dns-svc\") pod \"dnsmasq-dns-654794fb59-cf4b6\" (UID: \"635201a1-9967-46a0-8561-14d9d70d0c6c\") " pod="openstack/dnsmasq-dns-654794fb59-cf4b6"
Sep 30 10:05:38 crc kubenswrapper[4730]: I0930 10:05:38.053714 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d2hlz\" (UniqueName: \"kubernetes.io/projected/635201a1-9967-46a0-8561-14d9d70d0c6c-kube-api-access-d2hlz\") pod \"dnsmasq-dns-654794fb59-cf4b6\" (UID: \"635201a1-9967-46a0-8561-14d9d70d0c6c\") " pod="openstack/dnsmasq-dns-654794fb59-cf4b6"
Sep 30 10:05:38 crc kubenswrapper[4730]: I0930 10:05:38.054698 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/635201a1-9967-46a0-8561-14d9d70d0c6c-config\") pod \"dnsmasq-dns-654794fb59-cf4b6\" (UID: \"635201a1-9967-46a0-8561-14d9d70d0c6c\") " pod="openstack/dnsmasq-dns-654794fb59-cf4b6"
Sep 30 10:05:38 crc kubenswrapper[4730]: I0930 10:05:38.054734 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/635201a1-9967-46a0-8561-14d9d70d0c6c-dns-svc\") pod \"dnsmasq-dns-654794fb59-cf4b6\" (UID: \"635201a1-9967-46a0-8561-14d9d70d0c6c\") " pod="openstack/dnsmasq-dns-654794fb59-cf4b6"
Sep 30 10:05:38 crc kubenswrapper[4730]: I0930 10:05:38.069570 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d2hlz\" (UniqueName: \"kubernetes.io/projected/635201a1-9967-46a0-8561-14d9d70d0c6c-kube-api-access-d2hlz\") pod \"dnsmasq-dns-654794fb59-cf4b6\" (UID: \"635201a1-9967-46a0-8561-14d9d70d0c6c\") " pod="openstack/dnsmasq-dns-654794fb59-cf4b6"
Sep 30 10:05:38 crc kubenswrapper[4730]: I0930 10:05:38.220813 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-654794fb59-cf4b6"
Sep 30 10:05:38 crc kubenswrapper[4730]: I0930 10:05:38.487120 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"]
Sep 30 10:05:38 crc kubenswrapper[4730]: I0930 10:05:38.488447 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Sep 30 10:05:38 crc kubenswrapper[4730]: I0930 10:05:38.491846 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc"
Sep 30 10:05:38 crc kubenswrapper[4730]: I0930 10:05:38.492489 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user"
Sep 30 10:05:38 crc kubenswrapper[4730]: I0930 10:05:38.492676 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie"
Sep 30 10:05:38 crc kubenswrapper[4730]: I0930 10:05:38.492846 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf"
Sep 30 10:05:38 crc kubenswrapper[4730]: I0930 10:05:38.492956 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-cbldm"
Sep 30 10:05:38 crc kubenswrapper[4730]: I0930 10:05:38.493010 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf"
Sep 30 10:05:38 crc kubenswrapper[4730]: I0930 10:05:38.493134 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data"
Sep 30 10:05:38 crc kubenswrapper[4730]: I0930 10:05:38.524085 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"]
Sep 30 10:05:38 crc kubenswrapper[4730]: I0930 10:05:38.560752 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/6cf0ebea-06fc-47b2-a2c6-95605e023f94-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"6cf0ebea-06fc-47b2-a2c6-95605e023f94\") " pod="openstack/rabbitmq-server-0"
Sep 30 10:05:38 crc kubenswrapper[4730]: I0930 10:05:38.560815 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/6cf0ebea-06fc-47b2-a2c6-95605e023f94-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"6cf0ebea-06fc-47b2-a2c6-95605e023f94\") " pod="openstack/rabbitmq-server-0"
Sep 30 10:05:38 crc kubenswrapper[4730]: I0930 10:05:38.560850 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/6cf0ebea-06fc-47b2-a2c6-95605e023f94-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"6cf0ebea-06fc-47b2-a2c6-95605e023f94\") " pod="openstack/rabbitmq-server-0"
Sep 30 10:05:38 crc kubenswrapper[4730]: I0930 10:05:38.560879 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/6cf0ebea-06fc-47b2-a2c6-95605e023f94-pod-info\") pod \"rabbitmq-server-0\" (UID: \"6cf0ebea-06fc-47b2-a2c6-95605e023f94\") " pod="openstack/rabbitmq-server-0"
Sep 30 10:05:38 crc kubenswrapper[4730]: I0930 10:05:38.560902 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/6cf0ebea-06fc-47b2-a2c6-95605e023f94-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"6cf0ebea-06fc-47b2-a2c6-95605e023f94\") " pod="openstack/rabbitmq-server-0"
Sep 30 10:05:38 crc kubenswrapper[4730]: I0930 10:05:38.560922 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"rabbitmq-server-0\" (UID: \"6cf0ebea-06fc-47b2-a2c6-95605e023f94\") " pod="openstack/rabbitmq-server-0"
Sep 30 10:05:38 crc kubenswrapper[4730]: I0930 10:05:38.560975 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gxbpr\" (UniqueName: \"kubernetes.io/projected/6cf0ebea-06fc-47b2-a2c6-95605e023f94-kube-api-access-gxbpr\") pod \"rabbitmq-server-0\" (UID: \"6cf0ebea-06fc-47b2-a2c6-95605e023f94\") " pod="openstack/rabbitmq-server-0"
Sep 30 10:05:38 crc kubenswrapper[4730]: I0930 10:05:38.560997 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/6cf0ebea-06fc-47b2-a2c6-95605e023f94-config-data\") pod \"rabbitmq-server-0\" (UID: \"6cf0ebea-06fc-47b2-a2c6-95605e023f94\") " pod="openstack/rabbitmq-server-0"
Sep 30 10:05:38 crc kubenswrapper[4730]: I0930 10:05:38.561019 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/6cf0ebea-06fc-47b2-a2c6-95605e023f94-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"6cf0ebea-06fc-47b2-a2c6-95605e023f94\") " pod="openstack/rabbitmq-server-0"
Sep 30 10:05:38 crc kubenswrapper[4730]: I0930 10:05:38.561038 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/6cf0ebea-06fc-47b2-a2c6-95605e023f94-server-conf\") pod \"rabbitmq-server-0\" (UID: \"6cf0ebea-06fc-47b2-a2c6-95605e023f94\") " pod="openstack/rabbitmq-server-0"
Sep 30 10:05:38 crc kubenswrapper[4730]: I0930 10:05:38.561061 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/6cf0ebea-06fc-47b2-a2c6-95605e023f94-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"6cf0ebea-06fc-47b2-a2c6-95605e023f94\") " pod="openstack/rabbitmq-server-0"
Sep 30 10:05:38 crc kubenswrapper[4730]: I0930 10:05:38.661840 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/6cf0ebea-06fc-47b2-a2c6-95605e023f94-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"6cf0ebea-06fc-47b2-a2c6-95605e023f94\") " pod="openstack/rabbitmq-server-0"
Sep 30 10:05:38 crc kubenswrapper[4730]: I0930 10:05:38.661880 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/6cf0ebea-06fc-47b2-a2c6-95605e023f94-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"6cf0ebea-06fc-47b2-a2c6-95605e023f94\") " pod="openstack/rabbitmq-server-0"
Sep 30 10:05:38 crc kubenswrapper[4730]: I0930 10:05:38.661901 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/6cf0ebea-06fc-47b2-a2c6-95605e023f94-pod-info\") pod \"rabbitmq-server-0\" (UID: \"6cf0ebea-06fc-47b2-a2c6-95605e023f94\") " pod="openstack/rabbitmq-server-0"
Sep 30 10:05:38 crc kubenswrapper[4730]: I0930 10:05:38.661920 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/6cf0ebea-06fc-47b2-a2c6-95605e023f94-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"6cf0ebea-06fc-47b2-a2c6-95605e023f94\") " pod="openstack/rabbitmq-server-0"
Sep 30 10:05:38 crc kubenswrapper[4730]: I0930 10:05:38.661936 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"rabbitmq-server-0\" (UID: \"6cf0ebea-06fc-47b2-a2c6-95605e023f94\") " pod="openstack/rabbitmq-server-0"
Sep 30 10:05:38 crc kubenswrapper[4730]: I0930 10:05:38.661974 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gxbpr\" (UniqueName: \"kubernetes.io/projected/6cf0ebea-06fc-47b2-a2c6-95605e023f94-kube-api-access-gxbpr\") pod \"rabbitmq-server-0\" (UID: \"6cf0ebea-06fc-47b2-a2c6-95605e023f94\") " pod="openstack/rabbitmq-server-0"
Sep 30 10:05:38 crc kubenswrapper[4730]: I0930 10:05:38.661990 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/6cf0ebea-06fc-47b2-a2c6-95605e023f94-config-data\") pod \"rabbitmq-server-0\" (UID: \"6cf0ebea-06fc-47b2-a2c6-95605e023f94\") " pod="openstack/rabbitmq-server-0"
Sep 30 10:05:38 crc kubenswrapper[4730]: I0930 10:05:38.662007 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/6cf0ebea-06fc-47b2-a2c6-95605e023f94-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"6cf0ebea-06fc-47b2-a2c6-95605e023f94\") " pod="openstack/rabbitmq-server-0"
Sep 30 10:05:38 crc kubenswrapper[4730]: I0930 10:05:38.662024 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/6cf0ebea-06fc-47b2-a2c6-95605e023f94-server-conf\") pod \"rabbitmq-server-0\" (UID: \"6cf0ebea-06fc-47b2-a2c6-95605e023f94\") " pod="openstack/rabbitmq-server-0"
Sep 30 10:05:38 crc kubenswrapper[4730]: I0930 10:05:38.662048 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/6cf0ebea-06fc-47b2-a2c6-95605e023f94-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"6cf0ebea-06fc-47b2-a2c6-95605e023f94\") " pod="openstack/rabbitmq-server-0"
Sep 30 10:05:38 crc kubenswrapper[4730]: I0930 10:05:38.662087 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/6cf0ebea-06fc-47b2-a2c6-95605e023f94-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"6cf0ebea-06fc-47b2-a2c6-95605e023f94\") " pod="openstack/rabbitmq-server-0"
Sep 30 10:05:38 crc kubenswrapper[4730]: I0930 10:05:38.662847 4730 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"rabbitmq-server-0\" (UID: \"6cf0ebea-06fc-47b2-a2c6-95605e023f94\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/rabbitmq-server-0"
Sep 30 10:05:38 crc kubenswrapper[4730]: I0930 10:05:38.663226 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/6cf0ebea-06fc-47b2-a2c6-95605e023f94-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"6cf0ebea-06fc-47b2-a2c6-95605e023f94\") " pod="openstack/rabbitmq-server-0"
Sep 30 10:05:38 crc kubenswrapper[4730]: I0930 10:05:38.663327 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/6cf0ebea-06fc-47b2-a2c6-95605e023f94-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"6cf0ebea-06fc-47b2-a2c6-95605e023f94\") " pod="openstack/rabbitmq-server-0"
Sep 30 10:05:38 crc kubenswrapper[4730]: I0930 10:05:38.663406 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/6cf0ebea-06fc-47b2-a2c6-95605e023f94-config-data\") pod \"rabbitmq-server-0\" (UID: \"6cf0ebea-06fc-47b2-a2c6-95605e023f94\") " pod="openstack/rabbitmq-server-0"
Sep 30 10:05:38 crc kubenswrapper[4730]: I0930 10:05:38.663494 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/6cf0ebea-06fc-47b2-a2c6-95605e023f94-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"6cf0ebea-06fc-47b2-a2c6-95605e023f94\") " pod="openstack/rabbitmq-server-0"
Sep 30 10:05:38 crc kubenswrapper[4730]: I0930 10:05:38.664062 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/6cf0ebea-06fc-47b2-a2c6-95605e023f94-server-conf\") pod \"rabbitmq-server-0\" (UID: \"6cf0ebea-06fc-47b2-a2c6-95605e023f94\") " pod="openstack/rabbitmq-server-0"
Sep 30 10:05:38 crc kubenswrapper[4730]: I0930 10:05:38.665904 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/6cf0ebea-06fc-47b2-a2c6-95605e023f94-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"6cf0ebea-06fc-47b2-a2c6-95605e023f94\") " pod="openstack/rabbitmq-server-0"
Sep 30 10:05:38 crc kubenswrapper[4730]: I0930 10:05:38.667831 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/6cf0ebea-06fc-47b2-a2c6-95605e023f94-pod-info\") pod \"rabbitmq-server-0\" (UID: \"6cf0ebea-06fc-47b2-a2c6-95605e023f94\") " pod="openstack/rabbitmq-server-0"
Sep 30 10:05:38 crc kubenswrapper[4730]: I0930 10:05:38.668754 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/6cf0ebea-06fc-47b2-a2c6-95605e023f94-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"6cf0ebea-06fc-47b2-a2c6-95605e023f94\") " pod="openstack/rabbitmq-server-0"
Sep 30 10:05:38 crc kubenswrapper[4730]: I0930 10:05:38.674465 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/6cf0ebea-06fc-47b2-a2c6-95605e023f94-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"6cf0ebea-06fc-47b2-a2c6-95605e023f94\") " pod="openstack/rabbitmq-server-0"
Sep 30 10:05:38 crc kubenswrapper[4730]: I0930 10:05:38.681126 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gxbpr\" (UniqueName: \"kubernetes.io/projected/6cf0ebea-06fc-47b2-a2c6-95605e023f94-kube-api-access-gxbpr\") pod \"rabbitmq-server-0\" (UID: \"6cf0ebea-06fc-47b2-a2c6-95605e023f94\") " pod="openstack/rabbitmq-server-0"
Sep 30 10:05:38 crc kubenswrapper[4730]: I0930 10:05:38.685337 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"rabbitmq-server-0\" (UID: \"6cf0ebea-06fc-47b2-a2c6-95605e023f94\") " pod="openstack/rabbitmq-server-0"
Sep 30 10:05:38 crc kubenswrapper[4730]: I0930 10:05:38.739145 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Sep 30 10:05:38 crc kubenswrapper[4730]: I0930 10:05:38.740737 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Sep 30 10:05:38 crc kubenswrapper[4730]: I0930 10:05:38.746106 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-vpw8r"
Sep 30 10:05:38 crc kubenswrapper[4730]: I0930 10:05:38.746139 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie"
Sep 30 10:05:38 crc kubenswrapper[4730]: I0930 10:05:38.746456 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data"
Sep 30 10:05:38 crc kubenswrapper[4730]: I0930 10:05:38.746511 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc"
Sep 30 10:05:38 crc kubenswrapper[4730]: I0930 10:05:38.746601 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf"
Sep 30 10:05:38 crc kubenswrapper[4730]: I0930 10:05:38.746862 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf"
Sep 30 10:05:38 crc kubenswrapper[4730]: I0930 10:05:38.746910 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user"
Sep 30 10:05:38 crc kubenswrapper[4730]: I0930 10:05:38.765648 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Sep 30 10:05:38 crc kubenswrapper[4730]: I0930 10:05:38.818592 4730 util.go:30] "No sandbox for pod can be found.
Sep 30 10:05:38 crc kubenswrapper[4730]: I0930 10:05:38.866272 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/b1cbad29-68e1-42a9-af8f-ea1cfcb774a2-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"b1cbad29-68e1-42a9-af8f-ea1cfcb774a2\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 30 10:05:38 crc kubenswrapper[4730]: I0930 10:05:38.866326 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hvmtd\" (UniqueName: \"kubernetes.io/projected/b1cbad29-68e1-42a9-af8f-ea1cfcb774a2-kube-api-access-hvmtd\") pod \"rabbitmq-cell1-server-0\" (UID: \"b1cbad29-68e1-42a9-af8f-ea1cfcb774a2\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 30 10:05:38 crc kubenswrapper[4730]: I0930 10:05:38.866405 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/b1cbad29-68e1-42a9-af8f-ea1cfcb774a2-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"b1cbad29-68e1-42a9-af8f-ea1cfcb774a2\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 30 10:05:38 crc kubenswrapper[4730]: I0930 10:05:38.866436 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/b1cbad29-68e1-42a9-af8f-ea1cfcb774a2-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"b1cbad29-68e1-42a9-af8f-ea1cfcb774a2\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 30 10:05:38 crc kubenswrapper[4730]: I0930 10:05:38.866465 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/b1cbad29-68e1-42a9-af8f-ea1cfcb774a2-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"b1cbad29-68e1-42a9-af8f-ea1cfcb774a2\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 30 10:05:38 crc kubenswrapper[4730]: I0930 10:05:38.866511 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b1cbad29-68e1-42a9-af8f-ea1cfcb774a2-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"b1cbad29-68e1-42a9-af8f-ea1cfcb774a2\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 30 10:05:38 crc kubenswrapper[4730]: I0930 10:05:38.866558 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/b1cbad29-68e1-42a9-af8f-ea1cfcb774a2-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"b1cbad29-68e1-42a9-af8f-ea1cfcb774a2\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 30 10:05:38 crc kubenswrapper[4730]: I0930 10:05:38.866588 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/b1cbad29-68e1-42a9-af8f-ea1cfcb774a2-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"b1cbad29-68e1-42a9-af8f-ea1cfcb774a2\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 30 10:05:38 crc kubenswrapper[4730]: I0930 10:05:38.866645 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/b1cbad29-68e1-42a9-af8f-ea1cfcb774a2-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"b1cbad29-68e1-42a9-af8f-ea1cfcb774a2\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 30 10:05:38 crc kubenswrapper[4730]: I0930 10:05:38.866687 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"b1cbad29-68e1-42a9-af8f-ea1cfcb774a2\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 30 10:05:38 crc kubenswrapper[4730]: I0930 10:05:38.866719 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/b1cbad29-68e1-42a9-af8f-ea1cfcb774a2-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"b1cbad29-68e1-42a9-af8f-ea1cfcb774a2\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 30 10:05:38 crc kubenswrapper[4730]: I0930 10:05:38.968321 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"b1cbad29-68e1-42a9-af8f-ea1cfcb774a2\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 30 10:05:38 crc kubenswrapper[4730]: I0930 10:05:38.968375 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/b1cbad29-68e1-42a9-af8f-ea1cfcb774a2-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"b1cbad29-68e1-42a9-af8f-ea1cfcb774a2\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 30 10:05:38 crc kubenswrapper[4730]: I0930 10:05:38.968400 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/b1cbad29-68e1-42a9-af8f-ea1cfcb774a2-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"b1cbad29-68e1-42a9-af8f-ea1cfcb774a2\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 30 10:05:38 crc kubenswrapper[4730]: I0930 10:05:38.968415 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hvmtd\" (UniqueName: \"kubernetes.io/projected/b1cbad29-68e1-42a9-af8f-ea1cfcb774a2-kube-api-access-hvmtd\") pod \"rabbitmq-cell1-server-0\" (UID: \"b1cbad29-68e1-42a9-af8f-ea1cfcb774a2\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 30 10:05:38 crc kubenswrapper[4730]: I0930 10:05:38.968445 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/b1cbad29-68e1-42a9-af8f-ea1cfcb774a2-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"b1cbad29-68e1-42a9-af8f-ea1cfcb774a2\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 30 10:05:38 crc kubenswrapper[4730]: I0930 10:05:38.968464 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/b1cbad29-68e1-42a9-af8f-ea1cfcb774a2-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"b1cbad29-68e1-42a9-af8f-ea1cfcb774a2\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 30 10:05:38 crc kubenswrapper[4730]: I0930 10:05:38.968480 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/b1cbad29-68e1-42a9-af8f-ea1cfcb774a2-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"b1cbad29-68e1-42a9-af8f-ea1cfcb774a2\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 30 10:05:38 crc kubenswrapper[4730]: I0930 10:05:38.968514 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b1cbad29-68e1-42a9-af8f-ea1cfcb774a2-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"b1cbad29-68e1-42a9-af8f-ea1cfcb774a2\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 30 10:05:38 crc kubenswrapper[4730]: I0930 10:05:38.968769 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/b1cbad29-68e1-42a9-af8f-ea1cfcb774a2-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"b1cbad29-68e1-42a9-af8f-ea1cfcb774a2\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 30 10:05:38 crc kubenswrapper[4730]: I0930 10:05:38.968794 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/b1cbad29-68e1-42a9-af8f-ea1cfcb774a2-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"b1cbad29-68e1-42a9-af8f-ea1cfcb774a2\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 30 10:05:38 crc kubenswrapper[4730]: I0930 10:05:38.968812 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/b1cbad29-68e1-42a9-af8f-ea1cfcb774a2-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"b1cbad29-68e1-42a9-af8f-ea1cfcb774a2\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 30 10:05:38 crc kubenswrapper[4730]: I0930 10:05:38.968975 4730 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"b1cbad29-68e1-42a9-af8f-ea1cfcb774a2\") device mount path \"/mnt/openstack/pv02\"" pod="openstack/rabbitmq-cell1-server-0"
Sep 30 10:05:38 crc kubenswrapper[4730]: I0930 10:05:38.970185 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/b1cbad29-68e1-42a9-af8f-ea1cfcb774a2-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"b1cbad29-68e1-42a9-af8f-ea1cfcb774a2\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 30 10:05:38 crc kubenswrapper[4730]: I0930 10:05:38.970574 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b1cbad29-68e1-42a9-af8f-ea1cfcb774a2-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"b1cbad29-68e1-42a9-af8f-ea1cfcb774a2\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 30 10:05:38 crc kubenswrapper[4730]: I0930 10:05:38.971398 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/b1cbad29-68e1-42a9-af8f-ea1cfcb774a2-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"b1cbad29-68e1-42a9-af8f-ea1cfcb774a2\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 30 10:05:38 crc kubenswrapper[4730]: I0930 10:05:38.971584 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/b1cbad29-68e1-42a9-af8f-ea1cfcb774a2-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"b1cbad29-68e1-42a9-af8f-ea1cfcb774a2\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 30 10:05:38 crc kubenswrapper[4730]: I0930 10:05:38.974171 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/b1cbad29-68e1-42a9-af8f-ea1cfcb774a2-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"b1cbad29-68e1-42a9-af8f-ea1cfcb774a2\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 30 10:05:38 crc kubenswrapper[4730]: I0930 10:05:38.974637 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/b1cbad29-68e1-42a9-af8f-ea1cfcb774a2-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"b1cbad29-68e1-42a9-af8f-ea1cfcb774a2\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 30 10:05:38 crc kubenswrapper[4730]: I0930 10:05:38.974964 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/b1cbad29-68e1-42a9-af8f-ea1cfcb774a2-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"b1cbad29-68e1-42a9-af8f-ea1cfcb774a2\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 30 10:05:38 crc kubenswrapper[4730]: I0930 10:05:38.978282 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/b1cbad29-68e1-42a9-af8f-ea1cfcb774a2-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"b1cbad29-68e1-42a9-af8f-ea1cfcb774a2\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 30 10:05:38 crc kubenswrapper[4730]: I0930 10:05:38.983195 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/b1cbad29-68e1-42a9-af8f-ea1cfcb774a2-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"b1cbad29-68e1-42a9-af8f-ea1cfcb774a2\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 30 10:05:38 crc kubenswrapper[4730]: I0930 10:05:38.986426 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hvmtd\" (UniqueName: \"kubernetes.io/projected/b1cbad29-68e1-42a9-af8f-ea1cfcb774a2-kube-api-access-hvmtd\") pod \"rabbitmq-cell1-server-0\" (UID: \"b1cbad29-68e1-42a9-af8f-ea1cfcb774a2\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 30 10:05:38 crc kubenswrapper[4730]: I0930 10:05:38.990939 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"b1cbad29-68e1-42a9-af8f-ea1cfcb774a2\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 30 10:05:39 crc kubenswrapper[4730]: I0930 10:05:39.007005 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-notifications-server-0"]
Sep 30 10:05:39 crc kubenswrapper[4730]: I0930 10:05:39.008486 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-notifications-server-0"
Sep 30 10:05:39 crc kubenswrapper[4730]: I0930 10:05:39.011192 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-notifications-erlang-cookie"
Sep 30 10:05:39 crc kubenswrapper[4730]: I0930 10:05:39.011451 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-notifications-plugins-conf"
Sep 30 10:05:39 crc kubenswrapper[4730]: I0930 10:05:39.015010 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-notifications-svc"
Sep 30 10:05:39 crc kubenswrapper[4730]: I0930 10:05:39.015178 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-notifications-server-dockercfg-9csdm"
Sep 30 10:05:39 crc kubenswrapper[4730]: I0930 10:05:39.015303 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-notifications-config-data"
Sep 30 10:05:39 crc kubenswrapper[4730]: I0930 10:05:39.015412 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-notifications-default-user"
Sep 30 10:05:39 crc kubenswrapper[4730]: I0930 10:05:39.015605 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-notifications-server-conf"
Sep 30 10:05:39 crc kubenswrapper[4730]: I0930 10:05:39.023724 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-notifications-server-0"]
Sep 30 10:05:39 crc kubenswrapper[4730]: I0930 10:05:39.076163 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Sep 30 10:05:39 crc kubenswrapper[4730]: I0930 10:05:39.171490 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/7ba6b518-edfa-4d19-b096-03d7d96c51a3-erlang-cookie-secret\") pod \"rabbitmq-notifications-server-0\" (UID: \"7ba6b518-edfa-4d19-b096-03d7d96c51a3\") " pod="openstack/rabbitmq-notifications-server-0"
Sep 30 10:05:39 crc kubenswrapper[4730]: I0930 10:05:39.171559 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/7ba6b518-edfa-4d19-b096-03d7d96c51a3-server-conf\") pod \"rabbitmq-notifications-server-0\" (UID: \"7ba6b518-edfa-4d19-b096-03d7d96c51a3\") " pod="openstack/rabbitmq-notifications-server-0"
Sep 30 10:05:39 crc kubenswrapper[4730]: I0930 10:05:39.171671 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-notifications-server-0\" (UID: \"7ba6b518-edfa-4d19-b096-03d7d96c51a3\") " pod="openstack/rabbitmq-notifications-server-0"
Sep 30 10:05:39 crc kubenswrapper[4730]: I0930 10:05:39.171699 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/7ba6b518-edfa-4d19-b096-03d7d96c51a3-pod-info\") pod \"rabbitmq-notifications-server-0\" (UID: \"7ba6b518-edfa-4d19-b096-03d7d96c51a3\") " pod="openstack/rabbitmq-notifications-server-0"
Sep 30 10:05:39 crc kubenswrapper[4730]: I0930 10:05:39.171725 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/7ba6b518-edfa-4d19-b096-03d7d96c51a3-rabbitmq-erlang-cookie\") pod \"rabbitmq-notifications-server-0\" (UID: \"7ba6b518-edfa-4d19-b096-03d7d96c51a3\") " pod="openstack/rabbitmq-notifications-server-0"
Sep 30 10:05:39 crc kubenswrapper[4730]: I0930 10:05:39.171772 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/7ba6b518-edfa-4d19-b096-03d7d96c51a3-plugins-conf\") pod \"rabbitmq-notifications-server-0\" (UID: \"7ba6b518-edfa-4d19-b096-03d7d96c51a3\") " pod="openstack/rabbitmq-notifications-server-0"
Sep 30 10:05:39 crc kubenswrapper[4730]: I0930 10:05:39.171811 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/7ba6b518-edfa-4d19-b096-03d7d96c51a3-rabbitmq-tls\") pod \"rabbitmq-notifications-server-0\" (UID: \"7ba6b518-edfa-4d19-b096-03d7d96c51a3\") " pod="openstack/rabbitmq-notifications-server-0"
Sep 30 10:05:39 crc kubenswrapper[4730]: I0930 10:05:39.171910 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/7ba6b518-edfa-4d19-b096-03d7d96c51a3-rabbitmq-plugins\") pod \"rabbitmq-notifications-server-0\" (UID: \"7ba6b518-edfa-4d19-b096-03d7d96c51a3\") " pod="openstack/rabbitmq-notifications-server-0"
Sep 30 10:05:39 crc kubenswrapper[4730]: I0930 10:05:39.172030 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7ba6b518-edfa-4d19-b096-03d7d96c51a3-config-data\") pod \"rabbitmq-notifications-server-0\" (UID: \"7ba6b518-edfa-4d19-b096-03d7d96c51a3\") " pod="openstack/rabbitmq-notifications-server-0"
Sep 30 10:05:39 crc kubenswrapper[4730]: I0930 10:05:39.172052 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/7ba6b518-edfa-4d19-b096-03d7d96c51a3-rabbitmq-confd\") pod \"rabbitmq-notifications-server-0\" (UID: \"7ba6b518-edfa-4d19-b096-03d7d96c51a3\") " pod="openstack/rabbitmq-notifications-server-0"
Sep 30 10:05:39 crc kubenswrapper[4730]: I0930 10:05:39.172153 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pbpph\" (UniqueName: \"kubernetes.io/projected/7ba6b518-edfa-4d19-b096-03d7d96c51a3-kube-api-access-pbpph\") pod \"rabbitmq-notifications-server-0\" (UID: \"7ba6b518-edfa-4d19-b096-03d7d96c51a3\") " pod="openstack/rabbitmq-notifications-server-0"
Sep 30 10:05:39 crc kubenswrapper[4730]: I0930 10:05:39.274239 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/7ba6b518-edfa-4d19-b096-03d7d96c51a3-erlang-cookie-secret\") pod \"rabbitmq-notifications-server-0\" (UID: \"7ba6b518-edfa-4d19-b096-03d7d96c51a3\") " pod="openstack/rabbitmq-notifications-server-0"
Sep 30 10:05:39 crc kubenswrapper[4730]: I0930 10:05:39.274324 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/7ba6b518-edfa-4d19-b096-03d7d96c51a3-server-conf\") pod \"rabbitmq-notifications-server-0\" (UID: \"7ba6b518-edfa-4d19-b096-03d7d96c51a3\") " pod="openstack/rabbitmq-notifications-server-0"
Sep 30 10:05:39 crc kubenswrapper[4730]: I0930 10:05:39.274402 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-notifications-server-0\" (UID: \"7ba6b518-edfa-4d19-b096-03d7d96c51a3\") " pod="openstack/rabbitmq-notifications-server-0"
Sep 30 10:05:39 crc kubenswrapper[4730]: I0930 10:05:39.274433 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/7ba6b518-edfa-4d19-b096-03d7d96c51a3-pod-info\") pod \"rabbitmq-notifications-server-0\" (UID: \"7ba6b518-edfa-4d19-b096-03d7d96c51a3\") " pod="openstack/rabbitmq-notifications-server-0"
Sep 30 10:05:39 crc kubenswrapper[4730]: I0930 10:05:39.274459 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/7ba6b518-edfa-4d19-b096-03d7d96c51a3-rabbitmq-erlang-cookie\") pod \"rabbitmq-notifications-server-0\" (UID: \"7ba6b518-edfa-4d19-b096-03d7d96c51a3\") " pod="openstack/rabbitmq-notifications-server-0"
Sep 30 10:05:39 crc kubenswrapper[4730]: I0930 10:05:39.274507 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/7ba6b518-edfa-4d19-b096-03d7d96c51a3-plugins-conf\") pod \"rabbitmq-notifications-server-0\" (UID: \"7ba6b518-edfa-4d19-b096-03d7d96c51a3\") " pod="openstack/rabbitmq-notifications-server-0"
Sep 30 10:05:39 crc kubenswrapper[4730]: I0930 10:05:39.274532 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/7ba6b518-edfa-4d19-b096-03d7d96c51a3-rabbitmq-tls\") pod \"rabbitmq-notifications-server-0\" (UID: \"7ba6b518-edfa-4d19-b096-03d7d96c51a3\") " pod="openstack/rabbitmq-notifications-server-0"
Sep 30 10:05:39 crc kubenswrapper[4730]: I0930 10:05:39.274565 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/7ba6b518-edfa-4d19-b096-03d7d96c51a3-rabbitmq-plugins\") pod \"rabbitmq-notifications-server-0\" (UID: \"7ba6b518-edfa-4d19-b096-03d7d96c51a3\") " pod="openstack/rabbitmq-notifications-server-0"
Sep 30 10:05:39 crc kubenswrapper[4730]: I0930 10:05:39.274636 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/7ba6b518-edfa-4d19-b096-03d7d96c51a3-rabbitmq-confd\") pod \"rabbitmq-notifications-server-0\" (UID: \"7ba6b518-edfa-4d19-b096-03d7d96c51a3\") " pod="openstack/rabbitmq-notifications-server-0"
Sep 30 10:05:39 crc kubenswrapper[4730]: I0930 10:05:39.274668 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7ba6b518-edfa-4d19-b096-03d7d96c51a3-config-data\") pod \"rabbitmq-notifications-server-0\" (UID: \"7ba6b518-edfa-4d19-b096-03d7d96c51a3\") " pod="openstack/rabbitmq-notifications-server-0"
Sep 30 10:05:39 crc kubenswrapper[4730]: I0930 10:05:39.274696 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pbpph\" (UniqueName: \"kubernetes.io/projected/7ba6b518-edfa-4d19-b096-03d7d96c51a3-kube-api-access-pbpph\") pod \"rabbitmq-notifications-server-0\" (UID: \"7ba6b518-edfa-4d19-b096-03d7d96c51a3\") " pod="openstack/rabbitmq-notifications-server-0"
Sep 30 10:05:39 crc kubenswrapper[4730]: I0930 10:05:39.274873 4730 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-notifications-server-0\" (UID: \"7ba6b518-edfa-4d19-b096-03d7d96c51a3\") device mount path \"/mnt/openstack/pv03\"" pod="openstack/rabbitmq-notifications-server-0"
Sep 30 10:05:39 crc kubenswrapper[4730]: I0930 10:05:39.275650 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/7ba6b518-edfa-4d19-b096-03d7d96c51a3-rabbitmq-plugins\") pod \"rabbitmq-notifications-server-0\" (UID: \"7ba6b518-edfa-4d19-b096-03d7d96c51a3\") " pod="openstack/rabbitmq-notifications-server-0"
Sep 30 10:05:39 crc kubenswrapper[4730]: I0930 10:05:39.276242 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7ba6b518-edfa-4d19-b096-03d7d96c51a3-config-data\") pod \"rabbitmq-notifications-server-0\" (UID: \"7ba6b518-edfa-4d19-b096-03d7d96c51a3\") " pod="openstack/rabbitmq-notifications-server-0"
Sep 30 10:05:39 crc kubenswrapper[4730]: I0930 10:05:39.276319 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/7ba6b518-edfa-4d19-b096-03d7d96c51a3-server-conf\") pod \"rabbitmq-notifications-server-0\" (UID: \"7ba6b518-edfa-4d19-b096-03d7d96c51a3\") " pod="openstack/rabbitmq-notifications-server-0"
Sep 30 10:05:39 crc kubenswrapper[4730]: I0930 10:05:39.277830 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/7ba6b518-edfa-4d19-b096-03d7d96c51a3-rabbitmq-erlang-cookie\") pod \"rabbitmq-notifications-server-0\" (UID: \"7ba6b518-edfa-4d19-b096-03d7d96c51a3\") " pod="openstack/rabbitmq-notifications-server-0"
Sep 30 10:05:39 crc kubenswrapper[4730]: I0930 10:05:39.279507 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/7ba6b518-edfa-4d19-b096-03d7d96c51a3-pod-info\") pod \"rabbitmq-notifications-server-0\" (UID: \"7ba6b518-edfa-4d19-b096-03d7d96c51a3\") " pod="openstack/rabbitmq-notifications-server-0"
Sep 30 10:05:39 crc kubenswrapper[4730]: I0930 10:05:39.279888 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/7ba6b518-edfa-4d19-b096-03d7d96c51a3-plugins-conf\") pod \"rabbitmq-notifications-server-0\" (UID: \"7ba6b518-edfa-4d19-b096-03d7d96c51a3\") " pod="openstack/rabbitmq-notifications-server-0"
Sep 30 10:05:39 crc kubenswrapper[4730]: I0930 10:05:39.282392 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/7ba6b518-edfa-4d19-b096-03d7d96c51a3-rabbitmq-tls\") pod \"rabbitmq-notifications-server-0\" (UID: \"7ba6b518-edfa-4d19-b096-03d7d96c51a3\") " pod="openstack/rabbitmq-notifications-server-0"
Sep 30 10:05:39 crc kubenswrapper[4730]: I0930 10:05:39.282770 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/7ba6b518-edfa-4d19-b096-03d7d96c51a3-erlang-cookie-secret\") pod \"rabbitmq-notifications-server-0\" (UID: \"7ba6b518-edfa-4d19-b096-03d7d96c51a3\") " pod="openstack/rabbitmq-notifications-server-0"
Sep 30 10:05:39 crc kubenswrapper[4730]: I0930 10:05:39.296519 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-notifications-server-0\" (UID: \"7ba6b518-edfa-4d19-b096-03d7d96c51a3\") " pod="openstack/rabbitmq-notifications-server-0"
Sep 30 10:05:39 crc kubenswrapper[4730]: I0930 10:05:39.311465 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/7ba6b518-edfa-4d19-b096-03d7d96c51a3-rabbitmq-confd\") pod \"rabbitmq-notifications-server-0\" (UID: \"7ba6b518-edfa-4d19-b096-03d7d96c51a3\") " pod="openstack/rabbitmq-notifications-server-0"
Sep 30 10:05:39 crc kubenswrapper[4730]: I0930 10:05:39.327194 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pbpph\" (UniqueName: \"kubernetes.io/projected/7ba6b518-edfa-4d19-b096-03d7d96c51a3-kube-api-access-pbpph\") pod \"rabbitmq-notifications-server-0\" (UID: \"7ba6b518-edfa-4d19-b096-03d7d96c51a3\") " pod="openstack/rabbitmq-notifications-server-0"
Sep 30 10:05:39 crc kubenswrapper[4730]: I0930 10:05:39.368107 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-notifications-server-0"
Sep 30 10:05:41 crc kubenswrapper[4730]: I0930 10:05:41.868110 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"]
Sep 30 10:05:41 crc kubenswrapper[4730]: I0930 10:05:41.879660 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"]
Sep 30 10:05:41 crc kubenswrapper[4730]: I0930 10:05:41.879769 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0"
Sep 30 10:05:41 crc kubenswrapper[4730]: I0930 10:05:41.883436 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-75crf"
Sep 30 10:05:41 crc kubenswrapper[4730]: I0930 10:05:41.883487 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc"
Sep 30 10:05:41 crc kubenswrapper[4730]: I0930 10:05:41.883700 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret"
Sep 30 10:05:41 crc kubenswrapper[4730]: I0930 10:05:41.883872 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts"
Sep 30 10:05:41 crc kubenswrapper[4730]: I0930 10:05:41.883883 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data"
Sep 30 10:05:41 crc kubenswrapper[4730]: I0930 10:05:41.900775 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle"
Sep 30 10:05:41 crc kubenswrapper[4730]: I0930 10:05:41.979937 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"]
Sep 30 10:05:41 crc kubenswrapper[4730]: I0930 10:05:41.981220 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0"
Sep 30 10:05:41 crc kubenswrapper[4730]: I0930 10:05:41.983519 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc"
Sep 30 10:05:41 crc kubenswrapper[4730]: I0930 10:05:41.985599 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data"
Sep 30 10:05:41 crc kubenswrapper[4730]: I0930 10:05:41.985790 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts"
Sep 30 10:05:41 crc kubenswrapper[4730]: I0930 10:05:41.986325 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-7jld8"
Sep 30 10:05:41 crc kubenswrapper[4730]: I0930 10:05:41.992822 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"]
Sep 30 10:05:42 crc kubenswrapper[4730]: I0930 10:05:42.027326 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/28cd8cfe-8aa0-4ba3-bbfc-a8475c534fa1-config-data-generated\") pod \"openstack-galera-0\" (UID: \"28cd8cfe-8aa0-4ba3-bbfc-a8475c534fa1\") " pod="openstack/openstack-galera-0"
Sep 30 10:05:42 crc kubenswrapper[4730]: I0930 10:05:42.027383 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/28cd8cfe-8aa0-4ba3-bbfc-a8475c534fa1-operator-scripts\") pod \"openstack-galera-0\" (UID: \"28cd8cfe-8aa0-4ba3-bbfc-a8475c534fa1\") " pod="openstack/openstack-galera-0"
Sep 30 10:05:42 crc kubenswrapper[4730]: I0930 10:05:42.027413 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/28cd8cfe-8aa0-4ba3-bbfc-a8475c534fa1-config-data-default\") pod \"openstack-galera-0\" (UID: \"28cd8cfe-8aa0-4ba3-bbfc-a8475c534fa1\") " pod="openstack/openstack-galera-0"
Sep 30 10:05:42 crc kubenswrapper[4730]: I0930 10:05:42.027456 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/28cd8cfe-8aa0-4ba3-bbfc-a8475c534fa1-secrets\") pod \"openstack-galera-0\" (UID: \"28cd8cfe-8aa0-4ba3-bbfc-a8475c534fa1\") " pod="openstack/openstack-galera-0"
Sep 30 10:05:42 crc kubenswrapper[4730]: I0930 10:05:42.027489 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/28cd8cfe-8aa0-4ba3-bbfc-a8475c534fa1-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"28cd8cfe-8aa0-4ba3-bbfc-a8475c534fa1\") " pod="openstack/openstack-galera-0"
Sep 30 10:05:42 crc kubenswrapper[4730]: I0930 10:05:42.027547 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"openstack-galera-0\" (UID: \"28cd8cfe-8aa0-4ba3-bbfc-a8475c534fa1\") " pod="openstack/openstack-galera-0"
Sep 30 10:05:42 crc kubenswrapper[4730]: I0930 10:05:42.027577 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tj277\" (UniqueName: \"kubernetes.io/projected/28cd8cfe-8aa0-4ba3-bbfc-a8475c534fa1-kube-api-access-tj277\") pod \"openstack-galera-0\" (UID: \"28cd8cfe-8aa0-4ba3-bbfc-a8475c534fa1\") " pod="openstack/openstack-galera-0"
Sep 30 10:05:42 crc kubenswrapper[4730]: I0930 10:05:42.027604 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/28cd8cfe-8aa0-4ba3-bbfc-a8475c534fa1-kolla-config\") pod \"openstack-galera-0\" (UID: \"28cd8cfe-8aa0-4ba3-bbfc-a8475c534fa1\") " pod="openstack/openstack-galera-0"
Sep 30 10:05:42 crc kubenswrapper[4730]: I0930 10:05:42.027709 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/28cd8cfe-8aa0-4ba3-bbfc-a8475c534fa1-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"28cd8cfe-8aa0-4ba3-bbfc-a8475c534fa1\") " pod="openstack/openstack-galera-0"
Sep 30 10:05:42 crc kubenswrapper[4730]: I0930 10:05:42.129217 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/c1e0179d-6dc3-4dec-8ff6-48c794add5a3-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"c1e0179d-6dc3-4dec-8ff6-48c794add5a3\") " pod="openstack/openstack-cell1-galera-0"
Sep 30 10:05:42 crc kubenswrapper[4730]: I0930 10:05:42.129263 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/c1e0179d-6dc3-4dec-8ff6-48c794add5a3-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"c1e0179d-6dc3-4dec-8ff6-48c794add5a3\") " pod="openstack/openstack-cell1-galera-0"
Sep 30 10:05:42 crc kubenswrapper[4730]: I0930 10:05:42.129291 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-cell1-galera-0\" (UID: \"c1e0179d-6dc3-4dec-8ff6-48c794add5a3\") " pod="openstack/openstack-cell1-galera-0"
Sep 30 10:05:42 crc kubenswrapper[4730]: I0930 10:05:42.129315 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/c1e0179d-6dc3-4dec-8ff6-48c794add5a3-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"c1e0179d-6dc3-4dec-8ff6-48c794add5a3\") " pod="openstack/openstack-cell1-galera-0"
Sep 30 10:05:42 crc kubenswrapper[4730]: I0930 10:05:42.129338 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/c1e0179d-6dc3-4dec-8ff6-48c794add5a3-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"c1e0179d-6dc3-4dec-8ff6-48c794add5a3\") " pod="openstack/openstack-cell1-galera-0"
Sep 30 10:05:42 crc kubenswrapper[4730]: I0930 10:05:42.129368 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/28cd8cfe-8aa0-4ba3-bbfc-a8475c534fa1-config-data-generated\") pod \"openstack-galera-0\" (UID: \"28cd8cfe-8aa0-4ba3-bbfc-a8475c534fa1\") " pod="openstack/openstack-galera-0"
Sep 30 10:05:42 crc kubenswrapper[4730]: I0930 10:05:42.129425 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c1e0179d-6dc3-4dec-8ff6-48c794add5a3-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"c1e0179d-6dc3-4dec-8ff6-48c794add5a3\") " pod="openstack/openstack-cell1-galera-0"
Sep 30 10:05:42 crc kubenswrapper[4730]: I0930 10:05:42.129553 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/28cd8cfe-8aa0-4ba3-bbfc-a8475c534fa1-operator-scripts\") pod \"openstack-galera-0\" (UID: \"28cd8cfe-8aa0-4ba3-bbfc-a8475c534fa1\") " pod="openstack/openstack-galera-0"
Sep 30 10:05:42 crc kubenswrapper[4730]: I0930 10:05:42.129604 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c1e0179d-6dc3-4dec-8ff6-48c794add5a3-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"c1e0179d-6dc3-4dec-8ff6-48c794add5a3\") " pod="openstack/openstack-cell1-galera-0"
Sep 30 10:05:42 crc kubenswrapper[4730]: I0930 10:05:42.129670 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/28cd8cfe-8aa0-4ba3-bbfc-a8475c534fa1-config-data-default\") pod \"openstack-galera-0\" (UID: \"28cd8cfe-8aa0-4ba3-bbfc-a8475c534fa1\") " pod="openstack/openstack-galera-0"
Sep 30 10:05:42 crc kubenswrapper[4730]: I0930 10:05:42.129784 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/28cd8cfe-8aa0-4ba3-bbfc-a8475c534fa1-secrets\") pod \"openstack-galera-0\" (UID: \"28cd8cfe-8aa0-4ba3-bbfc-a8475c534fa1\") " pod="openstack/openstack-galera-0"
Sep 30 10:05:42 crc kubenswrapper[4730]: I0930 10:05:42.129821 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/c1e0179d-6dc3-4dec-8ff6-48c794add5a3-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"c1e0179d-6dc3-4dec-8ff6-48c794add5a3\") " pod="openstack/openstack-cell1-galera-0"
Sep 30 10:05:42 crc kubenswrapper[4730]: I0930 10:05:42.129861 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/28cd8cfe-8aa0-4ba3-bbfc-a8475c534fa1-config-data-generated\") pod \"openstack-galera-0\" (UID: \"28cd8cfe-8aa0-4ba3-bbfc-a8475c534fa1\") " pod="openstack/openstack-galera-0"
Sep 30 10:05:42 crc kubenswrapper[4730]: I0930 10:05:42.129864 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8rgt6\" (UniqueName: \"kubernetes.io/projected/c1e0179d-6dc3-4dec-8ff6-48c794add5a3-kube-api-access-8rgt6\") pod \"openstack-cell1-galera-0\" (UID: \"c1e0179d-6dc3-4dec-8ff6-48c794add5a3\") " pod="openstack/openstack-cell1-galera-0"
Sep 30 10:05:42 crc kubenswrapper[4730]: I0930 10:05:42.129973 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/28cd8cfe-8aa0-4ba3-bbfc-a8475c534fa1-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"28cd8cfe-8aa0-4ba3-bbfc-a8475c534fa1\") " pod="openstack/openstack-galera-0"
Sep 30 10:05:42 crc kubenswrapper[4730]: I0930 10:05:42.130010 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"openstack-galera-0\" (UID: \"28cd8cfe-8aa0-4ba3-bbfc-a8475c534fa1\") " pod="openstack/openstack-galera-0"
Sep 30 10:05:42 crc kubenswrapper[4730]: I0930 10:05:42.130030 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tj277\" (UniqueName: \"kubernetes.io/projected/28cd8cfe-8aa0-4ba3-bbfc-a8475c534fa1-kube-api-access-tj277\") pod \"openstack-galera-0\" (UID: \"28cd8cfe-8aa0-4ba3-bbfc-a8475c534fa1\") " pod="openstack/openstack-galera-0"
Sep 30 10:05:42 crc kubenswrapper[4730]: I0930 10:05:42.130050 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/28cd8cfe-8aa0-4ba3-bbfc-a8475c534fa1-kolla-config\") pod \"openstack-galera-0\" (UID: \"28cd8cfe-8aa0-4ba3-bbfc-a8475c534fa1\") " pod="openstack/openstack-galera-0"
Sep 30 10:05:42 crc kubenswrapper[4730]: I0930 10:05:42.130066 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/28cd8cfe-8aa0-4ba3-bbfc-a8475c534fa1-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"28cd8cfe-8aa0-4ba3-bbfc-a8475c534fa1\") " pod="openstack/openstack-galera-0"
Sep 30 10:05:42 crc kubenswrapper[4730]: I0930 10:05:42.130335 4730 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"openstack-galera-0\" (UID: \"28cd8cfe-8aa0-4ba3-bbfc-a8475c534fa1\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/openstack-galera-0"
Sep 30 10:05:42 crc kubenswrapper[4730]: I0930 10:05:42.130743 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/28cd8cfe-8aa0-4ba3-bbfc-a8475c534fa1-config-data-default\") pod \"openstack-galera-0\" (UID: \"28cd8cfe-8aa0-4ba3-bbfc-a8475c534fa1\") " pod="openstack/openstack-galera-0"
Sep 30 10:05:42 crc kubenswrapper[4730]: I0930 10:05:42.131512 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/28cd8cfe-8aa0-4ba3-bbfc-a8475c534fa1-kolla-config\") pod \"openstack-galera-0\" (UID: \"28cd8cfe-8aa0-4ba3-bbfc-a8475c534fa1\") " pod="openstack/openstack-galera-0"
Sep 30 10:05:42 crc kubenswrapper[4730]: I0930 10:05:42.132424 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/28cd8cfe-8aa0-4ba3-bbfc-a8475c534fa1-operator-scripts\") pod \"openstack-galera-0\" (UID: \"28cd8cfe-8aa0-4ba3-bbfc-a8475c534fa1\") " pod="openstack/openstack-galera-0"
Sep 30 10:05:42 crc kubenswrapper[4730]: I0930 10:05:42.139444 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/28cd8cfe-8aa0-4ba3-bbfc-a8475c534fa1-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"28cd8cfe-8aa0-4ba3-bbfc-a8475c534fa1\") " pod="openstack/openstack-galera-0"
Sep 30 10:05:42 crc kubenswrapper[4730]: I0930 10:05:42.147682 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/28cd8cfe-8aa0-4ba3-bbfc-a8475c534fa1-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"28cd8cfe-8aa0-4ba3-bbfc-a8475c534fa1\") " pod="openstack/openstack-galera-0"
Sep 30 10:05:42 crc kubenswrapper[4730]: I0930 10:05:42.149392 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tj277\" (UniqueName: \"kubernetes.io/projected/28cd8cfe-8aa0-4ba3-bbfc-a8475c534fa1-kube-api-access-tj277\") pod \"openstack-galera-0\" (UID: \"28cd8cfe-8aa0-4ba3-bbfc-a8475c534fa1\") " pod="openstack/openstack-galera-0"
\"kubernetes.io/projected/28cd8cfe-8aa0-4ba3-bbfc-a8475c534fa1-kube-api-access-tj277\") pod \"openstack-galera-0\" (UID: \"28cd8cfe-8aa0-4ba3-bbfc-a8475c534fa1\") " pod="openstack/openstack-galera-0" Sep 30 10:05:42 crc kubenswrapper[4730]: I0930 10:05:42.150370 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/28cd8cfe-8aa0-4ba3-bbfc-a8475c534fa1-secrets\") pod \"openstack-galera-0\" (UID: \"28cd8cfe-8aa0-4ba3-bbfc-a8475c534fa1\") " pod="openstack/openstack-galera-0" Sep 30 10:05:42 crc kubenswrapper[4730]: I0930 10:05:42.171772 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"openstack-galera-0\" (UID: \"28cd8cfe-8aa0-4ba3-bbfc-a8475c534fa1\") " pod="openstack/openstack-galera-0" Sep 30 10:05:42 crc kubenswrapper[4730]: I0930 10:05:42.205354 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Sep 30 10:05:42 crc kubenswrapper[4730]: I0930 10:05:42.230897 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/c1e0179d-6dc3-4dec-8ff6-48c794add5a3-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"c1e0179d-6dc3-4dec-8ff6-48c794add5a3\") " pod="openstack/openstack-cell1-galera-0" Sep 30 10:05:42 crc kubenswrapper[4730]: I0930 10:05:42.230949 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8rgt6\" (UniqueName: \"kubernetes.io/projected/c1e0179d-6dc3-4dec-8ff6-48c794add5a3-kube-api-access-8rgt6\") pod \"openstack-cell1-galera-0\" (UID: \"c1e0179d-6dc3-4dec-8ff6-48c794add5a3\") " pod="openstack/openstack-cell1-galera-0" Sep 30 10:05:42 crc kubenswrapper[4730]: I0930 10:05:42.231008 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/c1e0179d-6dc3-4dec-8ff6-48c794add5a3-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"c1e0179d-6dc3-4dec-8ff6-48c794add5a3\") " pod="openstack/openstack-cell1-galera-0" Sep 30 10:05:42 crc kubenswrapper[4730]: I0930 10:05:42.231026 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/c1e0179d-6dc3-4dec-8ff6-48c794add5a3-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"c1e0179d-6dc3-4dec-8ff6-48c794add5a3\") " pod="openstack/openstack-cell1-galera-0" Sep 30 10:05:42 crc kubenswrapper[4730]: I0930 10:05:42.231044 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-cell1-galera-0\" (UID: \"c1e0179d-6dc3-4dec-8ff6-48c794add5a3\") " pod="openstack/openstack-cell1-galera-0" Sep 30 10:05:42 crc kubenswrapper[4730]: I0930 10:05:42.231061 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/c1e0179d-6dc3-4dec-8ff6-48c794add5a3-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"c1e0179d-6dc3-4dec-8ff6-48c794add5a3\") " pod="openstack/openstack-cell1-galera-0" Sep 30 10:05:42 crc kubenswrapper[4730]: I0930 10:05:42.231079 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: 
\"kubernetes.io/empty-dir/c1e0179d-6dc3-4dec-8ff6-48c794add5a3-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"c1e0179d-6dc3-4dec-8ff6-48c794add5a3\") " pod="openstack/openstack-cell1-galera-0" Sep 30 10:05:42 crc kubenswrapper[4730]: I0930 10:05:42.231103 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c1e0179d-6dc3-4dec-8ff6-48c794add5a3-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"c1e0179d-6dc3-4dec-8ff6-48c794add5a3\") " pod="openstack/openstack-cell1-galera-0" Sep 30 10:05:42 crc kubenswrapper[4730]: I0930 10:05:42.231128 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c1e0179d-6dc3-4dec-8ff6-48c794add5a3-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"c1e0179d-6dc3-4dec-8ff6-48c794add5a3\") " pod="openstack/openstack-cell1-galera-0" Sep 30 10:05:42 crc kubenswrapper[4730]: I0930 10:05:42.231667 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/c1e0179d-6dc3-4dec-8ff6-48c794add5a3-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"c1e0179d-6dc3-4dec-8ff6-48c794add5a3\") " pod="openstack/openstack-cell1-galera-0" Sep 30 10:05:42 crc kubenswrapper[4730]: I0930 10:05:42.231787 4730 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-cell1-galera-0\" (UID: \"c1e0179d-6dc3-4dec-8ff6-48c794add5a3\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/openstack-cell1-galera-0" Sep 30 10:05:42 crc kubenswrapper[4730]: I0930 10:05:42.232190 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c1e0179d-6dc3-4dec-8ff6-48c794add5a3-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"c1e0179d-6dc3-4dec-8ff6-48c794add5a3\") " pod="openstack/openstack-cell1-galera-0" Sep 30 10:05:42 crc kubenswrapper[4730]: I0930 10:05:42.233530 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/c1e0179d-6dc3-4dec-8ff6-48c794add5a3-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"c1e0179d-6dc3-4dec-8ff6-48c794add5a3\") " pod="openstack/openstack-cell1-galera-0" Sep 30 10:05:42 crc kubenswrapper[4730]: I0930 10:05:42.234029 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/c1e0179d-6dc3-4dec-8ff6-48c794add5a3-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"c1e0179d-6dc3-4dec-8ff6-48c794add5a3\") " pod="openstack/openstack-cell1-galera-0" Sep 30 10:05:42 crc kubenswrapper[4730]: I0930 10:05:42.237179 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/c1e0179d-6dc3-4dec-8ff6-48c794add5a3-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"c1e0179d-6dc3-4dec-8ff6-48c794add5a3\") " pod="openstack/openstack-cell1-galera-0" Sep 30 10:05:42 crc kubenswrapper[4730]: I0930 10:05:42.238225 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/c1e0179d-6dc3-4dec-8ff6-48c794add5a3-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: 
\"c1e0179d-6dc3-4dec-8ff6-48c794add5a3\") " pod="openstack/openstack-cell1-galera-0" Sep 30 10:05:42 crc kubenswrapper[4730]: I0930 10:05:42.252298 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c1e0179d-6dc3-4dec-8ff6-48c794add5a3-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"c1e0179d-6dc3-4dec-8ff6-48c794add5a3\") " pod="openstack/openstack-cell1-galera-0" Sep 30 10:05:42 crc kubenswrapper[4730]: I0930 10:05:42.254165 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8rgt6\" (UniqueName: \"kubernetes.io/projected/c1e0179d-6dc3-4dec-8ff6-48c794add5a3-kube-api-access-8rgt6\") pod \"openstack-cell1-galera-0\" (UID: \"c1e0179d-6dc3-4dec-8ff6-48c794add5a3\") " pod="openstack/openstack-cell1-galera-0" Sep 30 10:05:42 crc kubenswrapper[4730]: I0930 10:05:42.259390 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-cell1-galera-0\" (UID: \"c1e0179d-6dc3-4dec-8ff6-48c794add5a3\") " pod="openstack/openstack-cell1-galera-0" Sep 30 10:05:42 crc kubenswrapper[4730]: I0930 10:05:42.317079 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Sep 30 10:05:42 crc kubenswrapper[4730]: I0930 10:05:42.369388 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"] Sep 30 10:05:42 crc kubenswrapper[4730]: I0930 10:05:42.371590 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Sep 30 10:05:42 crc kubenswrapper[4730]: I0930 10:05:42.384782 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-memcached-svc" Sep 30 10:05:42 crc kubenswrapper[4730]: I0930 10:05:42.384970 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-7vz2w" Sep 30 10:05:42 crc kubenswrapper[4730]: I0930 10:05:42.385005 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data" Sep 30 10:05:42 crc kubenswrapper[4730]: I0930 10:05:42.404791 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Sep 30 10:05:42 crc kubenswrapper[4730]: I0930 10:05:42.535999 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c7ad8423-527a-4195-8e63-d04e2bed66c9-combined-ca-bundle\") pod \"memcached-0\" (UID: \"c7ad8423-527a-4195-8e63-d04e2bed66c9\") " pod="openstack/memcached-0" Sep 30 10:05:42 crc kubenswrapper[4730]: I0930 10:05:42.536082 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c7ad8423-527a-4195-8e63-d04e2bed66c9-config-data\") pod \"memcached-0\" (UID: \"c7ad8423-527a-4195-8e63-d04e2bed66c9\") " pod="openstack/memcached-0" Sep 30 10:05:42 crc kubenswrapper[4730]: I0930 10:05:42.536112 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wbttb\" (UniqueName: \"kubernetes.io/projected/c7ad8423-527a-4195-8e63-d04e2bed66c9-kube-api-access-wbttb\") pod \"memcached-0\" (UID: \"c7ad8423-527a-4195-8e63-d04e2bed66c9\") " pod="openstack/memcached-0" Sep 30 10:05:42 crc kubenswrapper[4730]: I0930 10:05:42.536267 4730 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/c7ad8423-527a-4195-8e63-d04e2bed66c9-memcached-tls-certs\") pod \"memcached-0\" (UID: \"c7ad8423-527a-4195-8e63-d04e2bed66c9\") " pod="openstack/memcached-0" Sep 30 10:05:42 crc kubenswrapper[4730]: I0930 10:05:42.536365 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/c7ad8423-527a-4195-8e63-d04e2bed66c9-kolla-config\") pod \"memcached-0\" (UID: \"c7ad8423-527a-4195-8e63-d04e2bed66c9\") " pod="openstack/memcached-0" Sep 30 10:05:42 crc kubenswrapper[4730]: I0930 10:05:42.637486 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/c7ad8423-527a-4195-8e63-d04e2bed66c9-memcached-tls-certs\") pod \"memcached-0\" (UID: \"c7ad8423-527a-4195-8e63-d04e2bed66c9\") " pod="openstack/memcached-0" Sep 30 10:05:42 crc kubenswrapper[4730]: I0930 10:05:42.637943 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/c7ad8423-527a-4195-8e63-d04e2bed66c9-kolla-config\") pod \"memcached-0\" (UID: \"c7ad8423-527a-4195-8e63-d04e2bed66c9\") " pod="openstack/memcached-0" Sep 30 10:05:42 crc kubenswrapper[4730]: I0930 10:05:42.638015 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c7ad8423-527a-4195-8e63-d04e2bed66c9-combined-ca-bundle\") pod \"memcached-0\" (UID: \"c7ad8423-527a-4195-8e63-d04e2bed66c9\") " pod="openstack/memcached-0" Sep 30 10:05:42 crc kubenswrapper[4730]: I0930 10:05:42.638050 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c7ad8423-527a-4195-8e63-d04e2bed66c9-config-data\") pod \"memcached-0\" (UID: \"c7ad8423-527a-4195-8e63-d04e2bed66c9\") " pod="openstack/memcached-0" Sep 30 10:05:42 crc kubenswrapper[4730]: I0930 10:05:42.638073 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wbttb\" (UniqueName: \"kubernetes.io/projected/c7ad8423-527a-4195-8e63-d04e2bed66c9-kube-api-access-wbttb\") pod \"memcached-0\" (UID: \"c7ad8423-527a-4195-8e63-d04e2bed66c9\") " pod="openstack/memcached-0" Sep 30 10:05:42 crc kubenswrapper[4730]: I0930 10:05:42.638764 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/c7ad8423-527a-4195-8e63-d04e2bed66c9-kolla-config\") pod \"memcached-0\" (UID: \"c7ad8423-527a-4195-8e63-d04e2bed66c9\") " pod="openstack/memcached-0" Sep 30 10:05:42 crc kubenswrapper[4730]: I0930 10:05:42.638882 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c7ad8423-527a-4195-8e63-d04e2bed66c9-config-data\") pod \"memcached-0\" (UID: \"c7ad8423-527a-4195-8e63-d04e2bed66c9\") " pod="openstack/memcached-0" Sep 30 10:05:42 crc kubenswrapper[4730]: I0930 10:05:42.643166 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/c7ad8423-527a-4195-8e63-d04e2bed66c9-memcached-tls-certs\") pod \"memcached-0\" (UID: \"c7ad8423-527a-4195-8e63-d04e2bed66c9\") " pod="openstack/memcached-0" Sep 30 10:05:42 crc kubenswrapper[4730]: 
I0930 10:05:42.645044 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c7ad8423-527a-4195-8e63-d04e2bed66c9-combined-ca-bundle\") pod \"memcached-0\" (UID: \"c7ad8423-527a-4195-8e63-d04e2bed66c9\") " pod="openstack/memcached-0" Sep 30 10:05:42 crc kubenswrapper[4730]: I0930 10:05:42.656890 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wbttb\" (UniqueName: \"kubernetes.io/projected/c7ad8423-527a-4195-8e63-d04e2bed66c9-kube-api-access-wbttb\") pod \"memcached-0\" (UID: \"c7ad8423-527a-4195-8e63-d04e2bed66c9\") " pod="openstack/memcached-0" Sep 30 10:05:42 crc kubenswrapper[4730]: I0930 10:05:42.712538 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Sep 30 10:05:43 crc kubenswrapper[4730]: I0930 10:05:43.961690 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Sep 30 10:05:43 crc kubenswrapper[4730]: I0930 10:05:43.963041 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Sep 30 10:05:43 crc kubenswrapper[4730]: I0930 10:05:43.966340 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-rbnkq" Sep 30 10:05:43 crc kubenswrapper[4730]: I0930 10:05:43.969472 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Sep 30 10:05:44 crc kubenswrapper[4730]: I0930 10:05:44.069842 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gdpr9\" (UniqueName: \"kubernetes.io/projected/e1641d17-bb96-477c-ae5f-39f8a1da719c-kube-api-access-gdpr9\") pod \"kube-state-metrics-0\" (UID: \"e1641d17-bb96-477c-ae5f-39f8a1da719c\") " pod="openstack/kube-state-metrics-0" Sep 30 10:05:44 crc kubenswrapper[4730]: I0930 10:05:44.173624 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gdpr9\" (UniqueName: \"kubernetes.io/projected/e1641d17-bb96-477c-ae5f-39f8a1da719c-kube-api-access-gdpr9\") pod \"kube-state-metrics-0\" (UID: \"e1641d17-bb96-477c-ae5f-39f8a1da719c\") " pod="openstack/kube-state-metrics-0" Sep 30 10:05:44 crc kubenswrapper[4730]: I0930 10:05:44.193066 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gdpr9\" (UniqueName: \"kubernetes.io/projected/e1641d17-bb96-477c-ae5f-39f8a1da719c-kube-api-access-gdpr9\") pod \"kube-state-metrics-0\" (UID: \"e1641d17-bb96-477c-ae5f-39f8a1da719c\") " pod="openstack/kube-state-metrics-0" Sep 30 10:05:44 crc kubenswrapper[4730]: I0930 10:05:44.280059 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Sep 30 10:05:45 crc kubenswrapper[4730]: I0930 10:05:45.195060 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/prometheus-metric-storage-0"] Sep 30 10:05:45 crc kubenswrapper[4730]: I0930 10:05:45.197968 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/prometheus-metric-storage-0" Sep 30 10:05:45 crc kubenswrapper[4730]: I0930 10:05:45.201360 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"prometheus-metric-storage-rulefiles-0" Sep 30 10:05:45 crc kubenswrapper[4730]: I0930 10:05:45.201550 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-thanos-prometheus-http-client-file" Sep 30 10:05:45 crc kubenswrapper[4730]: I0930 10:05:45.201560 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage" Sep 30 10:05:45 crc kubenswrapper[4730]: I0930 10:05:45.201659 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-web-config" Sep 30 10:05:45 crc kubenswrapper[4730]: I0930 10:05:45.203487 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"metric-storage-prometheus-dockercfg-vs854" Sep 30 10:05:45 crc kubenswrapper[4730]: I0930 10:05:45.214360 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-tls-assets-0" Sep 30 10:05:45 crc kubenswrapper[4730]: I0930 10:05:45.219416 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"] Sep 30 10:05:45 crc kubenswrapper[4730]: I0930 10:05:45.288960 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9f762\" (UniqueName: \"kubernetes.io/projected/57094072-3915-41c8-a4f8-35960aa068c4-kube-api-access-9f762\") pod \"prometheus-metric-storage-0\" (UID: \"57094072-3915-41c8-a4f8-35960aa068c4\") " pod="openstack/prometheus-metric-storage-0" Sep 30 10:05:45 crc kubenswrapper[4730]: I0930 10:05:45.289044 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/57094072-3915-41c8-a4f8-35960aa068c4-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"57094072-3915-41c8-a4f8-35960aa068c4\") " pod="openstack/prometheus-metric-storage-0" Sep 30 10:05:45 crc kubenswrapper[4730]: I0930 10:05:45.289081 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/57094072-3915-41c8-a4f8-35960aa068c4-config\") pod \"prometheus-metric-storage-0\" (UID: \"57094072-3915-41c8-a4f8-35960aa068c4\") " pod="openstack/prometheus-metric-storage-0" Sep 30 10:05:45 crc kubenswrapper[4730]: I0930 10:05:45.289105 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/57094072-3915-41c8-a4f8-35960aa068c4-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"57094072-3915-41c8-a4f8-35960aa068c4\") " pod="openstack/prometheus-metric-storage-0" Sep 30 10:05:45 crc kubenswrapper[4730]: I0930 10:05:45.289131 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/57094072-3915-41c8-a4f8-35960aa068c4-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"57094072-3915-41c8-a4f8-35960aa068c4\") " pod="openstack/prometheus-metric-storage-0" Sep 30 10:05:45 crc kubenswrapper[4730]: I0930 10:05:45.289554 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"config-out\" (UniqueName: \"kubernetes.io/empty-dir/57094072-3915-41c8-a4f8-35960aa068c4-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"57094072-3915-41c8-a4f8-35960aa068c4\") " pod="openstack/prometheus-metric-storage-0" Sep 30 10:05:45 crc kubenswrapper[4730]: I0930 10:05:45.289587 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-5d620c66-5a11-463c-a9e7-c12e856084b2\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-5d620c66-5a11-463c-a9e7-c12e856084b2\") pod \"prometheus-metric-storage-0\" (UID: \"57094072-3915-41c8-a4f8-35960aa068c4\") " pod="openstack/prometheus-metric-storage-0" Sep 30 10:05:45 crc kubenswrapper[4730]: I0930 10:05:45.289655 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/57094072-3915-41c8-a4f8-35960aa068c4-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"57094072-3915-41c8-a4f8-35960aa068c4\") " pod="openstack/prometheus-metric-storage-0" Sep 30 10:05:45 crc kubenswrapper[4730]: I0930 10:05:45.391233 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9f762\" (UniqueName: \"kubernetes.io/projected/57094072-3915-41c8-a4f8-35960aa068c4-kube-api-access-9f762\") pod \"prometheus-metric-storage-0\" (UID: \"57094072-3915-41c8-a4f8-35960aa068c4\") " pod="openstack/prometheus-metric-storage-0" Sep 30 10:05:45 crc kubenswrapper[4730]: I0930 10:05:45.391298 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/57094072-3915-41c8-a4f8-35960aa068c4-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"57094072-3915-41c8-a4f8-35960aa068c4\") " pod="openstack/prometheus-metric-storage-0" Sep 30 10:05:45 crc kubenswrapper[4730]: I0930 10:05:45.391321 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/57094072-3915-41c8-a4f8-35960aa068c4-config\") pod \"prometheus-metric-storage-0\" (UID: \"57094072-3915-41c8-a4f8-35960aa068c4\") " pod="openstack/prometheus-metric-storage-0" Sep 30 10:05:45 crc kubenswrapper[4730]: I0930 10:05:45.391340 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/57094072-3915-41c8-a4f8-35960aa068c4-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"57094072-3915-41c8-a4f8-35960aa068c4\") " pod="openstack/prometheus-metric-storage-0" Sep 30 10:05:45 crc kubenswrapper[4730]: I0930 10:05:45.391360 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/57094072-3915-41c8-a4f8-35960aa068c4-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"57094072-3915-41c8-a4f8-35960aa068c4\") " pod="openstack/prometheus-metric-storage-0" Sep 30 10:05:45 crc kubenswrapper[4730]: I0930 10:05:45.391409 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/57094072-3915-41c8-a4f8-35960aa068c4-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"57094072-3915-41c8-a4f8-35960aa068c4\") " pod="openstack/prometheus-metric-storage-0" Sep 30 10:05:45 crc kubenswrapper[4730]: I0930 10:05:45.391435 4730 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-5d620c66-5a11-463c-a9e7-c12e856084b2\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-5d620c66-5a11-463c-a9e7-c12e856084b2\") pod \"prometheus-metric-storage-0\" (UID: \"57094072-3915-41c8-a4f8-35960aa068c4\") " pod="openstack/prometheus-metric-storage-0" Sep 30 10:05:45 crc kubenswrapper[4730]: I0930 10:05:45.392110 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/57094072-3915-41c8-a4f8-35960aa068c4-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"57094072-3915-41c8-a4f8-35960aa068c4\") " pod="openstack/prometheus-metric-storage-0" Sep 30 10:05:45 crc kubenswrapper[4730]: I0930 10:05:45.393530 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/57094072-3915-41c8-a4f8-35960aa068c4-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"57094072-3915-41c8-a4f8-35960aa068c4\") " pod="openstack/prometheus-metric-storage-0" Sep 30 10:05:45 crc kubenswrapper[4730]: I0930 10:05:45.401762 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/57094072-3915-41c8-a4f8-35960aa068c4-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"57094072-3915-41c8-a4f8-35960aa068c4\") " pod="openstack/prometheus-metric-storage-0" Sep 30 10:05:45 crc kubenswrapper[4730]: I0930 10:05:45.401856 4730 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Sep 30 10:05:45 crc kubenswrapper[4730]: I0930 10:05:45.401893 4730 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-5d620c66-5a11-463c-a9e7-c12e856084b2\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-5d620c66-5a11-463c-a9e7-c12e856084b2\") pod \"prometheus-metric-storage-0\" (UID: \"57094072-3915-41c8-a4f8-35960aa068c4\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/cf539b04350c5d85f90d0468a4b3f4f72d24a709bb4a2121a25d26c6e8fc960c/globalmount\"" pod="openstack/prometheus-metric-storage-0" Sep 30 10:05:45 crc kubenswrapper[4730]: I0930 10:05:45.405373 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/57094072-3915-41c8-a4f8-35960aa068c4-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"57094072-3915-41c8-a4f8-35960aa068c4\") " pod="openstack/prometheus-metric-storage-0" Sep 30 10:05:45 crc kubenswrapper[4730]: I0930 10:05:45.405813 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/57094072-3915-41c8-a4f8-35960aa068c4-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"57094072-3915-41c8-a4f8-35960aa068c4\") " pod="openstack/prometheus-metric-storage-0" Sep 30 10:05:45 crc kubenswrapper[4730]: I0930 10:05:45.406248 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/57094072-3915-41c8-a4f8-35960aa068c4-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"57094072-3915-41c8-a4f8-35960aa068c4\") " pod="openstack/prometheus-metric-storage-0" Sep 30 10:05:45 crc kubenswrapper[4730]: I0930 10:05:45.406991 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/57094072-3915-41c8-a4f8-35960aa068c4-config\") pod \"prometheus-metric-storage-0\" (UID: \"57094072-3915-41c8-a4f8-35960aa068c4\") " pod="openstack/prometheus-metric-storage-0" Sep 30 10:05:45 crc kubenswrapper[4730]: I0930 10:05:45.430440 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9f762\" (UniqueName: \"kubernetes.io/projected/57094072-3915-41c8-a4f8-35960aa068c4-kube-api-access-9f762\") pod \"prometheus-metric-storage-0\" (UID: \"57094072-3915-41c8-a4f8-35960aa068c4\") " pod="openstack/prometheus-metric-storage-0" Sep 30 10:05:45 crc kubenswrapper[4730]: I0930 10:05:45.496467 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-5d620c66-5a11-463c-a9e7-c12e856084b2\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-5d620c66-5a11-463c-a9e7-c12e856084b2\") pod \"prometheus-metric-storage-0\" (UID: \"57094072-3915-41c8-a4f8-35960aa068c4\") " pod="openstack/prometheus-metric-storage-0" Sep 30 10:05:45 crc kubenswrapper[4730]: I0930 10:05:45.526046 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0" Sep 30 10:05:47 crc kubenswrapper[4730]: I0930 10:05:47.330195 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-4tlvb"] Sep 30 10:05:47 crc kubenswrapper[4730]: I0930 10:05:47.333191 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-4tlvb" Sep 30 10:05:47 crc kubenswrapper[4730]: I0930 10:05:47.337316 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncontroller-ovncontroller-dockercfg-d2qlf" Sep 30 10:05:47 crc kubenswrapper[4730]: I0930 10:05:47.337427 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-scripts" Sep 30 10:05:47 crc kubenswrapper[4730]: I0930 10:05:47.337465 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovncontroller-ovndbs" Sep 30 10:05:47 crc kubenswrapper[4730]: I0930 10:05:47.339702 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-4tlvb"] Sep 30 10:05:47 crc kubenswrapper[4730]: I0930 10:05:47.345023 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ovs-wb9fw"] Sep 30 10:05:47 crc kubenswrapper[4730]: I0930 10:05:47.346511 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-wb9fw" Sep 30 10:05:47 crc kubenswrapper[4730]: I0930 10:05:47.381462 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-wb9fw"] Sep 30 10:05:47 crc kubenswrapper[4730]: I0930 10:05:47.426332 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/43c558ac-76c0-4c01-a265-41320a386add-var-log\") pod \"ovn-controller-ovs-wb9fw\" (UID: \"43c558ac-76c0-4c01-a265-41320a386add\") " pod="openstack/ovn-controller-ovs-wb9fw" Sep 30 10:05:47 crc kubenswrapper[4730]: I0930 10:05:47.426372 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n689f\" (UniqueName: \"kubernetes.io/projected/43c558ac-76c0-4c01-a265-41320a386add-kube-api-access-n689f\") pod \"ovn-controller-ovs-wb9fw\" (UID: \"43c558ac-76c0-4c01-a265-41320a386add\") " pod="openstack/ovn-controller-ovs-wb9fw" Sep 30 10:05:47 crc kubenswrapper[4730]: I0930 10:05:47.426446 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d247bfe5-48d7-49be-9cd4-2d3368015e3a-combined-ca-bundle\") pod \"ovn-controller-4tlvb\" (UID: \"d247bfe5-48d7-49be-9cd4-2d3368015e3a\") " pod="openstack/ovn-controller-4tlvb" Sep 30 10:05:47 crc kubenswrapper[4730]: I0930 10:05:47.426623 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2jml9\" (UniqueName: \"kubernetes.io/projected/d247bfe5-48d7-49be-9cd4-2d3368015e3a-kube-api-access-2jml9\") pod \"ovn-controller-4tlvb\" (UID: \"d247bfe5-48d7-49be-9cd4-2d3368015e3a\") " pod="openstack/ovn-controller-4tlvb" Sep 30 10:05:47 crc kubenswrapper[4730]: I0930 10:05:47.426673 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/d247bfe5-48d7-49be-9cd4-2d3368015e3a-var-log-ovn\") pod \"ovn-controller-4tlvb\" (UID: \"d247bfe5-48d7-49be-9cd4-2d3368015e3a\") " pod="openstack/ovn-controller-4tlvb" Sep 30 10:05:47 crc kubenswrapper[4730]: I0930 10:05:47.426711 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/43c558ac-76c0-4c01-a265-41320a386add-etc-ovs\") pod \"ovn-controller-ovs-wb9fw\" (UID: 
\"43c558ac-76c0-4c01-a265-41320a386add\") " pod="openstack/ovn-controller-ovs-wb9fw" Sep 30 10:05:47 crc kubenswrapper[4730]: I0930 10:05:47.426809 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/43c558ac-76c0-4c01-a265-41320a386add-var-lib\") pod \"ovn-controller-ovs-wb9fw\" (UID: \"43c558ac-76c0-4c01-a265-41320a386add\") " pod="openstack/ovn-controller-ovs-wb9fw" Sep 30 10:05:47 crc kubenswrapper[4730]: I0930 10:05:47.426868 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/d247bfe5-48d7-49be-9cd4-2d3368015e3a-ovn-controller-tls-certs\") pod \"ovn-controller-4tlvb\" (UID: \"d247bfe5-48d7-49be-9cd4-2d3368015e3a\") " pod="openstack/ovn-controller-4tlvb" Sep 30 10:05:47 crc kubenswrapper[4730]: I0930 10:05:47.426897 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/43c558ac-76c0-4c01-a265-41320a386add-scripts\") pod \"ovn-controller-ovs-wb9fw\" (UID: \"43c558ac-76c0-4c01-a265-41320a386add\") " pod="openstack/ovn-controller-ovs-wb9fw" Sep 30 10:05:47 crc kubenswrapper[4730]: I0930 10:05:47.426923 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d247bfe5-48d7-49be-9cd4-2d3368015e3a-scripts\") pod \"ovn-controller-4tlvb\" (UID: \"d247bfe5-48d7-49be-9cd4-2d3368015e3a\") " pod="openstack/ovn-controller-4tlvb" Sep 30 10:05:47 crc kubenswrapper[4730]: I0930 10:05:47.426992 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/d247bfe5-48d7-49be-9cd4-2d3368015e3a-var-run\") pod \"ovn-controller-4tlvb\" (UID: \"d247bfe5-48d7-49be-9cd4-2d3368015e3a\") " pod="openstack/ovn-controller-4tlvb" Sep 30 10:05:47 crc kubenswrapper[4730]: I0930 10:05:47.427019 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/43c558ac-76c0-4c01-a265-41320a386add-var-run\") pod \"ovn-controller-ovs-wb9fw\" (UID: \"43c558ac-76c0-4c01-a265-41320a386add\") " pod="openstack/ovn-controller-ovs-wb9fw" Sep 30 10:05:47 crc kubenswrapper[4730]: I0930 10:05:47.427166 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/d247bfe5-48d7-49be-9cd4-2d3368015e3a-var-run-ovn\") pod \"ovn-controller-4tlvb\" (UID: \"d247bfe5-48d7-49be-9cd4-2d3368015e3a\") " pod="openstack/ovn-controller-4tlvb" Sep 30 10:05:47 crc kubenswrapper[4730]: I0930 10:05:47.530064 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/43c558ac-76c0-4c01-a265-41320a386add-var-lib\") pod \"ovn-controller-ovs-wb9fw\" (UID: \"43c558ac-76c0-4c01-a265-41320a386add\") " pod="openstack/ovn-controller-ovs-wb9fw" Sep 30 10:05:47 crc kubenswrapper[4730]: I0930 10:05:47.530120 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/d247bfe5-48d7-49be-9cd4-2d3368015e3a-ovn-controller-tls-certs\") pod \"ovn-controller-4tlvb\" (UID: \"d247bfe5-48d7-49be-9cd4-2d3368015e3a\") " pod="openstack/ovn-controller-4tlvb" Sep 
30 10:05:47 crc kubenswrapper[4730]: I0930 10:05:47.530138 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/43c558ac-76c0-4c01-a265-41320a386add-scripts\") pod \"ovn-controller-ovs-wb9fw\" (UID: \"43c558ac-76c0-4c01-a265-41320a386add\") " pod="openstack/ovn-controller-ovs-wb9fw" Sep 30 10:05:47 crc kubenswrapper[4730]: I0930 10:05:47.530159 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d247bfe5-48d7-49be-9cd4-2d3368015e3a-scripts\") pod \"ovn-controller-4tlvb\" (UID: \"d247bfe5-48d7-49be-9cd4-2d3368015e3a\") " pod="openstack/ovn-controller-4tlvb" Sep 30 10:05:47 crc kubenswrapper[4730]: I0930 10:05:47.530195 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/d247bfe5-48d7-49be-9cd4-2d3368015e3a-var-run\") pod \"ovn-controller-4tlvb\" (UID: \"d247bfe5-48d7-49be-9cd4-2d3368015e3a\") " pod="openstack/ovn-controller-4tlvb" Sep 30 10:05:47 crc kubenswrapper[4730]: I0930 10:05:47.530212 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/43c558ac-76c0-4c01-a265-41320a386add-var-run\") pod \"ovn-controller-ovs-wb9fw\" (UID: \"43c558ac-76c0-4c01-a265-41320a386add\") " pod="openstack/ovn-controller-ovs-wb9fw" Sep 30 10:05:47 crc kubenswrapper[4730]: I0930 10:05:47.530281 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/d247bfe5-48d7-49be-9cd4-2d3368015e3a-var-run-ovn\") pod \"ovn-controller-4tlvb\" (UID: \"d247bfe5-48d7-49be-9cd4-2d3368015e3a\") " pod="openstack/ovn-controller-4tlvb" Sep 30 10:05:47 crc kubenswrapper[4730]: I0930 10:05:47.530329 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/43c558ac-76c0-4c01-a265-41320a386add-var-log\") pod \"ovn-controller-ovs-wb9fw\" (UID: \"43c558ac-76c0-4c01-a265-41320a386add\") " pod="openstack/ovn-controller-ovs-wb9fw" Sep 30 10:05:47 crc kubenswrapper[4730]: I0930 10:05:47.530343 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n689f\" (UniqueName: \"kubernetes.io/projected/43c558ac-76c0-4c01-a265-41320a386add-kube-api-access-n689f\") pod \"ovn-controller-ovs-wb9fw\" (UID: \"43c558ac-76c0-4c01-a265-41320a386add\") " pod="openstack/ovn-controller-ovs-wb9fw" Sep 30 10:05:47 crc kubenswrapper[4730]: I0930 10:05:47.530390 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d247bfe5-48d7-49be-9cd4-2d3368015e3a-combined-ca-bundle\") pod \"ovn-controller-4tlvb\" (UID: \"d247bfe5-48d7-49be-9cd4-2d3368015e3a\") " pod="openstack/ovn-controller-4tlvb" Sep 30 10:05:47 crc kubenswrapper[4730]: I0930 10:05:47.530440 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2jml9\" (UniqueName: \"kubernetes.io/projected/d247bfe5-48d7-49be-9cd4-2d3368015e3a-kube-api-access-2jml9\") pod \"ovn-controller-4tlvb\" (UID: \"d247bfe5-48d7-49be-9cd4-2d3368015e3a\") " pod="openstack/ovn-controller-4tlvb" Sep 30 10:05:47 crc kubenswrapper[4730]: I0930 10:05:47.530457 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: 
\"kubernetes.io/host-path/d247bfe5-48d7-49be-9cd4-2d3368015e3a-var-log-ovn\") pod \"ovn-controller-4tlvb\" (UID: \"d247bfe5-48d7-49be-9cd4-2d3368015e3a\") " pod="openstack/ovn-controller-4tlvb" Sep 30 10:05:47 crc kubenswrapper[4730]: I0930 10:05:47.530474 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/43c558ac-76c0-4c01-a265-41320a386add-etc-ovs\") pod \"ovn-controller-ovs-wb9fw\" (UID: \"43c558ac-76c0-4c01-a265-41320a386add\") " pod="openstack/ovn-controller-ovs-wb9fw" Sep 30 10:05:47 crc kubenswrapper[4730]: I0930 10:05:47.531289 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/43c558ac-76c0-4c01-a265-41320a386add-var-lib\") pod \"ovn-controller-ovs-wb9fw\" (UID: \"43c558ac-76c0-4c01-a265-41320a386add\") " pod="openstack/ovn-controller-ovs-wb9fw" Sep 30 10:05:47 crc kubenswrapper[4730]: I0930 10:05:47.531435 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/d247bfe5-48d7-49be-9cd4-2d3368015e3a-var-run\") pod \"ovn-controller-4tlvb\" (UID: \"d247bfe5-48d7-49be-9cd4-2d3368015e3a\") " pod="openstack/ovn-controller-4tlvb" Sep 30 10:05:47 crc kubenswrapper[4730]: I0930 10:05:47.531942 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/43c558ac-76c0-4c01-a265-41320a386add-var-log\") pod \"ovn-controller-ovs-wb9fw\" (UID: \"43c558ac-76c0-4c01-a265-41320a386add\") " pod="openstack/ovn-controller-ovs-wb9fw" Sep 30 10:05:47 crc kubenswrapper[4730]: I0930 10:05:47.532661 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/d247bfe5-48d7-49be-9cd4-2d3368015e3a-var-log-ovn\") pod \"ovn-controller-4tlvb\" (UID: \"d247bfe5-48d7-49be-9cd4-2d3368015e3a\") " pod="openstack/ovn-controller-4tlvb" Sep 30 10:05:47 crc kubenswrapper[4730]: I0930 10:05:47.532784 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/43c558ac-76c0-4c01-a265-41320a386add-etc-ovs\") pod \"ovn-controller-ovs-wb9fw\" (UID: \"43c558ac-76c0-4c01-a265-41320a386add\") " pod="openstack/ovn-controller-ovs-wb9fw" Sep 30 10:05:47 crc kubenswrapper[4730]: I0930 10:05:47.533835 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/43c558ac-76c0-4c01-a265-41320a386add-var-run\") pod \"ovn-controller-ovs-wb9fw\" (UID: \"43c558ac-76c0-4c01-a265-41320a386add\") " pod="openstack/ovn-controller-ovs-wb9fw" Sep 30 10:05:47 crc kubenswrapper[4730]: I0930 10:05:47.534038 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/d247bfe5-48d7-49be-9cd4-2d3368015e3a-var-run-ovn\") pod \"ovn-controller-4tlvb\" (UID: \"d247bfe5-48d7-49be-9cd4-2d3368015e3a\") " pod="openstack/ovn-controller-4tlvb" Sep 30 10:05:47 crc kubenswrapper[4730]: I0930 10:05:47.534177 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d247bfe5-48d7-49be-9cd4-2d3368015e3a-scripts\") pod \"ovn-controller-4tlvb\" (UID: \"d247bfe5-48d7-49be-9cd4-2d3368015e3a\") " pod="openstack/ovn-controller-4tlvb" Sep 30 10:05:47 crc kubenswrapper[4730]: I0930 10:05:47.535392 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"scripts\" (UniqueName: \"kubernetes.io/configmap/43c558ac-76c0-4c01-a265-41320a386add-scripts\") pod \"ovn-controller-ovs-wb9fw\" (UID: \"43c558ac-76c0-4c01-a265-41320a386add\") " pod="openstack/ovn-controller-ovs-wb9fw" Sep 30 10:05:47 crc kubenswrapper[4730]: I0930 10:05:47.561446 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/d247bfe5-48d7-49be-9cd4-2d3368015e3a-ovn-controller-tls-certs\") pod \"ovn-controller-4tlvb\" (UID: \"d247bfe5-48d7-49be-9cd4-2d3368015e3a\") " pod="openstack/ovn-controller-4tlvb" Sep 30 10:05:47 crc kubenswrapper[4730]: I0930 10:05:47.564938 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d247bfe5-48d7-49be-9cd4-2d3368015e3a-combined-ca-bundle\") pod \"ovn-controller-4tlvb\" (UID: \"d247bfe5-48d7-49be-9cd4-2d3368015e3a\") " pod="openstack/ovn-controller-4tlvb" Sep 30 10:05:47 crc kubenswrapper[4730]: I0930 10:05:47.565031 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n689f\" (UniqueName: \"kubernetes.io/projected/43c558ac-76c0-4c01-a265-41320a386add-kube-api-access-n689f\") pod \"ovn-controller-ovs-wb9fw\" (UID: \"43c558ac-76c0-4c01-a265-41320a386add\") " pod="openstack/ovn-controller-ovs-wb9fw" Sep 30 10:05:47 crc kubenswrapper[4730]: I0930 10:05:47.566431 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2jml9\" (UniqueName: \"kubernetes.io/projected/d247bfe5-48d7-49be-9cd4-2d3368015e3a-kube-api-access-2jml9\") pod \"ovn-controller-4tlvb\" (UID: \"d247bfe5-48d7-49be-9cd4-2d3368015e3a\") " pod="openstack/ovn-controller-4tlvb" Sep 30 10:05:47 crc kubenswrapper[4730]: I0930 10:05:47.693172 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-4tlvb" Sep 30 10:05:47 crc kubenswrapper[4730]: I0930 10:05:47.700769 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-wb9fw" Sep 30 10:05:48 crc kubenswrapper[4730]: I0930 10:05:48.233248 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"] Sep 30 10:05:48 crc kubenswrapper[4730]: I0930 10:05:48.235727 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-nb-0" Sep 30 10:05:48 crc kubenswrapper[4730]: I0930 10:05:48.239940 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovn-metrics" Sep 30 10:05:48 crc kubenswrapper[4730]: I0930 10:05:48.244137 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config" Sep 30 10:05:48 crc kubenswrapper[4730]: I0930 10:05:48.244207 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-nb-ovndbs" Sep 30 10:05:48 crc kubenswrapper[4730]: I0930 10:05:48.244249 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts" Sep 30 10:05:48 crc kubenswrapper[4730]: I0930 10:05:48.255845 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-gccx8" Sep 30 10:05:48 crc kubenswrapper[4730]: I0930 10:05:48.278603 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Sep 30 10:05:48 crc kubenswrapper[4730]: I0930 10:05:48.343683 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/d84e120e-743b-4797-aa0a-e231ecfa59ab-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"d84e120e-743b-4797-aa0a-e231ecfa59ab\") " pod="openstack/ovsdbserver-nb-0" Sep 30 10:05:48 crc kubenswrapper[4730]: I0930 10:05:48.344013 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/d84e120e-743b-4797-aa0a-e231ecfa59ab-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"d84e120e-743b-4797-aa0a-e231ecfa59ab\") " pod="openstack/ovsdbserver-nb-0" Sep 30 10:05:48 crc kubenswrapper[4730]: I0930 10:05:48.344053 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d84e120e-743b-4797-aa0a-e231ecfa59ab-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"d84e120e-743b-4797-aa0a-e231ecfa59ab\") " pod="openstack/ovsdbserver-nb-0" Sep 30 10:05:48 crc kubenswrapper[4730]: I0930 10:05:48.344075 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"ovsdbserver-nb-0\" (UID: \"d84e120e-743b-4797-aa0a-e231ecfa59ab\") " pod="openstack/ovsdbserver-nb-0" Sep 30 10:05:48 crc kubenswrapper[4730]: I0930 10:05:48.344093 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d84e120e-743b-4797-aa0a-e231ecfa59ab-config\") pod \"ovsdbserver-nb-0\" (UID: \"d84e120e-743b-4797-aa0a-e231ecfa59ab\") " pod="openstack/ovsdbserver-nb-0" Sep 30 10:05:48 crc kubenswrapper[4730]: I0930 10:05:48.344128 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d84e120e-743b-4797-aa0a-e231ecfa59ab-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"d84e120e-743b-4797-aa0a-e231ecfa59ab\") " pod="openstack/ovsdbserver-nb-0" Sep 30 10:05:48 crc kubenswrapper[4730]: I0930 10:05:48.344154 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g5snh\" (UniqueName: 
\"kubernetes.io/projected/d84e120e-743b-4797-aa0a-e231ecfa59ab-kube-api-access-g5snh\") pod \"ovsdbserver-nb-0\" (UID: \"d84e120e-743b-4797-aa0a-e231ecfa59ab\") " pod="openstack/ovsdbserver-nb-0" Sep 30 10:05:48 crc kubenswrapper[4730]: I0930 10:05:48.344187 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/d84e120e-743b-4797-aa0a-e231ecfa59ab-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"d84e120e-743b-4797-aa0a-e231ecfa59ab\") " pod="openstack/ovsdbserver-nb-0" Sep 30 10:05:48 crc kubenswrapper[4730]: I0930 10:05:48.446132 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"ovsdbserver-nb-0\" (UID: \"d84e120e-743b-4797-aa0a-e231ecfa59ab\") " pod="openstack/ovsdbserver-nb-0" Sep 30 10:05:48 crc kubenswrapper[4730]: I0930 10:05:48.446181 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d84e120e-743b-4797-aa0a-e231ecfa59ab-config\") pod \"ovsdbserver-nb-0\" (UID: \"d84e120e-743b-4797-aa0a-e231ecfa59ab\") " pod="openstack/ovsdbserver-nb-0" Sep 30 10:05:48 crc kubenswrapper[4730]: I0930 10:05:48.446228 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d84e120e-743b-4797-aa0a-e231ecfa59ab-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"d84e120e-743b-4797-aa0a-e231ecfa59ab\") " pod="openstack/ovsdbserver-nb-0" Sep 30 10:05:48 crc kubenswrapper[4730]: I0930 10:05:48.446287 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g5snh\" (UniqueName: \"kubernetes.io/projected/d84e120e-743b-4797-aa0a-e231ecfa59ab-kube-api-access-g5snh\") pod \"ovsdbserver-nb-0\" (UID: \"d84e120e-743b-4797-aa0a-e231ecfa59ab\") " pod="openstack/ovsdbserver-nb-0" Sep 30 10:05:48 crc kubenswrapper[4730]: I0930 10:05:48.446358 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/d84e120e-743b-4797-aa0a-e231ecfa59ab-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"d84e120e-743b-4797-aa0a-e231ecfa59ab\") " pod="openstack/ovsdbserver-nb-0" Sep 30 10:05:48 crc kubenswrapper[4730]: I0930 10:05:48.446468 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/d84e120e-743b-4797-aa0a-e231ecfa59ab-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"d84e120e-743b-4797-aa0a-e231ecfa59ab\") " pod="openstack/ovsdbserver-nb-0" Sep 30 10:05:48 crc kubenswrapper[4730]: I0930 10:05:48.446569 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/d84e120e-743b-4797-aa0a-e231ecfa59ab-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"d84e120e-743b-4797-aa0a-e231ecfa59ab\") " pod="openstack/ovsdbserver-nb-0" Sep 30 10:05:48 crc kubenswrapper[4730]: I0930 10:05:48.446636 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d84e120e-743b-4797-aa0a-e231ecfa59ab-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"d84e120e-743b-4797-aa0a-e231ecfa59ab\") " pod="openstack/ovsdbserver-nb-0" Sep 30 10:05:48 crc kubenswrapper[4730]: I0930 
10:05:48.448153 4730 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"ovsdbserver-nb-0\" (UID: \"d84e120e-743b-4797-aa0a-e231ecfa59ab\") device mount path \"/mnt/openstack/pv11\"" pod="openstack/ovsdbserver-nb-0" Sep 30 10:05:48 crc kubenswrapper[4730]: I0930 10:05:48.449803 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/d84e120e-743b-4797-aa0a-e231ecfa59ab-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"d84e120e-743b-4797-aa0a-e231ecfa59ab\") " pod="openstack/ovsdbserver-nb-0" Sep 30 10:05:48 crc kubenswrapper[4730]: I0930 10:05:48.450207 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d84e120e-743b-4797-aa0a-e231ecfa59ab-config\") pod \"ovsdbserver-nb-0\" (UID: \"d84e120e-743b-4797-aa0a-e231ecfa59ab\") " pod="openstack/ovsdbserver-nb-0" Sep 30 10:05:48 crc kubenswrapper[4730]: I0930 10:05:48.450881 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d84e120e-743b-4797-aa0a-e231ecfa59ab-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"d84e120e-743b-4797-aa0a-e231ecfa59ab\") " pod="openstack/ovsdbserver-nb-0" Sep 30 10:05:48 crc kubenswrapper[4730]: I0930 10:05:48.453597 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/d84e120e-743b-4797-aa0a-e231ecfa59ab-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"d84e120e-743b-4797-aa0a-e231ecfa59ab\") " pod="openstack/ovsdbserver-nb-0" Sep 30 10:05:48 crc kubenswrapper[4730]: I0930 10:05:48.453641 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d84e120e-743b-4797-aa0a-e231ecfa59ab-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"d84e120e-743b-4797-aa0a-e231ecfa59ab\") " pod="openstack/ovsdbserver-nb-0" Sep 30 10:05:48 crc kubenswrapper[4730]: I0930 10:05:48.453752 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/d84e120e-743b-4797-aa0a-e231ecfa59ab-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"d84e120e-743b-4797-aa0a-e231ecfa59ab\") " pod="openstack/ovsdbserver-nb-0" Sep 30 10:05:48 crc kubenswrapper[4730]: I0930 10:05:48.468549 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"ovsdbserver-nb-0\" (UID: \"d84e120e-743b-4797-aa0a-e231ecfa59ab\") " pod="openstack/ovsdbserver-nb-0" Sep 30 10:05:48 crc kubenswrapper[4730]: I0930 10:05:48.494500 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g5snh\" (UniqueName: \"kubernetes.io/projected/d84e120e-743b-4797-aa0a-e231ecfa59ab-kube-api-access-g5snh\") pod \"ovsdbserver-nb-0\" (UID: \"d84e120e-743b-4797-aa0a-e231ecfa59ab\") " pod="openstack/ovsdbserver-nb-0" Sep 30 10:05:48 crc kubenswrapper[4730]: I0930 10:05:48.560074 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-nb-0" Sep 30 10:05:50 crc kubenswrapper[4730]: I0930 10:05:50.693778 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-notifications-server-0"] Sep 30 10:05:51 crc kubenswrapper[4730]: E0930 10:05:51.189673 4730 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.176:5001/podified-master-centos10/openstack-neutron-server:watcher_latest" Sep 30 10:05:51 crc kubenswrapper[4730]: E0930 10:05:51.189738 4730 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.176:5001/podified-master-centos10/openstack-neutron-server:watcher_latest" Sep 30 10:05:51 crc kubenswrapper[4730]: E0930 10:05:51.189905 4730 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:38.102.83.176:5001/podified-master-centos10/openstack-neutron-server:watcher_latest,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nffh5bdhf4h5f8h79h55h77h58fh56dh7bh6fh578hbch55dh68h56bhd9h65dh57ch658hc9h566h666h688h58h65dh684h5d7h6ch575h5d6h88q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-gnhsr,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-6c58b4c7b9-pcd2w_openstack(1536e158-ee29-48ad-88d6-9e6d148085e7): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 30 10:05:51 crc kubenswrapper[4730]: E0930 10:05:51.191738 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-6c58b4c7b9-pcd2w" podUID="1536e158-ee29-48ad-88d6-9e6d148085e7" Sep 30 10:05:51 crc kubenswrapper[4730]: W0930 10:05:51.196402 4730 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7ba6b518_edfa_4d19_b096_03d7d96c51a3.slice/crio-772c41f3162cd3aa237fe9703ce495b4afa72fce9cf995577c47d599fbea2862 WatchSource:0}: Error finding container 772c41f3162cd3aa237fe9703ce495b4afa72fce9cf995577c47d599fbea2862: Status 404 returned error can't find the container with id 772c41f3162cd3aa237fe9703ce495b4afa72fce9cf995577c47d599fbea2862 Sep 30 10:05:51 crc kubenswrapper[4730]: E0930 10:05:51.237911 4730 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.176:5001/podified-master-centos10/openstack-neutron-server:watcher_latest" Sep 30 10:05:51 crc kubenswrapper[4730]: E0930 10:05:51.237968 4730 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.176:5001/podified-master-centos10/openstack-neutron-server:watcher_latest" Sep 30 10:05:51 crc kubenswrapper[4730]: E0930 10:05:51.238089 4730 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:38.102.83.176:5001/podified-master-centos10/openstack-neutron-server:watcher_latest,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:ndfhb5h667h568h584h5f9h58dh565h664h587h597h577h64bh5c4h66fh647hbdh68ch5c5h68dh686h5f7h64hd7hc6h55fh57bh98h57fh87h5fh57fq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-zfxnn,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-7dff86fb65-5r2md_openstack(5c8b83f7-40a2-4215-b77d-828bc2ac8a44): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 30 10:05:51 crc kubenswrapper[4730]: E0930 10:05:51.239427 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" 
for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-7dff86fb65-5r2md" podUID="5c8b83f7-40a2-4215-b77d-828bc2ac8a44" Sep 30 10:05:51 crc kubenswrapper[4730]: I0930 10:05:51.478111 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"] Sep 30 10:05:51 crc kubenswrapper[4730]: I0930 10:05:51.479914 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Sep 30 10:05:51 crc kubenswrapper[4730]: I0930 10:05:51.481775 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config" Sep 30 10:05:51 crc kubenswrapper[4730]: I0930 10:05:51.481879 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-ptbvs" Sep 30 10:05:51 crc kubenswrapper[4730]: I0930 10:05:51.482003 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-sb-ovndbs" Sep 30 10:05:51 crc kubenswrapper[4730]: I0930 10:05:51.484750 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts" Sep 30 10:05:51 crc kubenswrapper[4730]: I0930 10:05:51.493185 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Sep 30 10:05:51 crc kubenswrapper[4730]: I0930 10:05:51.600584 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/5e6063f5-43cd-45ec-9ac3-4de0fd55cb15-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"5e6063f5-43cd-45ec-9ac3-4de0fd55cb15\") " pod="openstack/ovsdbserver-sb-0" Sep 30 10:05:51 crc kubenswrapper[4730]: I0930 10:05:51.600765 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"ovsdbserver-sb-0\" (UID: \"5e6063f5-43cd-45ec-9ac3-4de0fd55cb15\") " pod="openstack/ovsdbserver-sb-0" Sep 30 10:05:51 crc kubenswrapper[4730]: I0930 10:05:51.601125 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m6vsp\" (UniqueName: \"kubernetes.io/projected/5e6063f5-43cd-45ec-9ac3-4de0fd55cb15-kube-api-access-m6vsp\") pod \"ovsdbserver-sb-0\" (UID: \"5e6063f5-43cd-45ec-9ac3-4de0fd55cb15\") " pod="openstack/ovsdbserver-sb-0" Sep 30 10:05:51 crc kubenswrapper[4730]: I0930 10:05:51.601209 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/5e6063f5-43cd-45ec-9ac3-4de0fd55cb15-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"5e6063f5-43cd-45ec-9ac3-4de0fd55cb15\") " pod="openstack/ovsdbserver-sb-0" Sep 30 10:05:51 crc kubenswrapper[4730]: I0930 10:05:51.601372 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5e6063f5-43cd-45ec-9ac3-4de0fd55cb15-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"5e6063f5-43cd-45ec-9ac3-4de0fd55cb15\") " pod="openstack/ovsdbserver-sb-0" Sep 30 10:05:51 crc kubenswrapper[4730]: I0930 10:05:51.601403 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5e6063f5-43cd-45ec-9ac3-4de0fd55cb15-config\") pod 
\"ovsdbserver-sb-0\" (UID: \"5e6063f5-43cd-45ec-9ac3-4de0fd55cb15\") " pod="openstack/ovsdbserver-sb-0" Sep 30 10:05:51 crc kubenswrapper[4730]: I0930 10:05:51.601477 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/5e6063f5-43cd-45ec-9ac3-4de0fd55cb15-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"5e6063f5-43cd-45ec-9ac3-4de0fd55cb15\") " pod="openstack/ovsdbserver-sb-0" Sep 30 10:05:51 crc kubenswrapper[4730]: I0930 10:05:51.601539 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e6063f5-43cd-45ec-9ac3-4de0fd55cb15-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"5e6063f5-43cd-45ec-9ac3-4de0fd55cb15\") " pod="openstack/ovsdbserver-sb-0" Sep 30 10:05:51 crc kubenswrapper[4730]: I0930 10:05:51.639859 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-notifications-server-0" event={"ID":"7ba6b518-edfa-4d19-b096-03d7d96c51a3","Type":"ContainerStarted","Data":"772c41f3162cd3aa237fe9703ce495b4afa72fce9cf995577c47d599fbea2862"} Sep 30 10:05:51 crc kubenswrapper[4730]: I0930 10:05:51.703386 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5e6063f5-43cd-45ec-9ac3-4de0fd55cb15-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"5e6063f5-43cd-45ec-9ac3-4de0fd55cb15\") " pod="openstack/ovsdbserver-sb-0" Sep 30 10:05:51 crc kubenswrapper[4730]: I0930 10:05:51.703419 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5e6063f5-43cd-45ec-9ac3-4de0fd55cb15-config\") pod \"ovsdbserver-sb-0\" (UID: \"5e6063f5-43cd-45ec-9ac3-4de0fd55cb15\") " pod="openstack/ovsdbserver-sb-0" Sep 30 10:05:51 crc kubenswrapper[4730]: I0930 10:05:51.703451 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/5e6063f5-43cd-45ec-9ac3-4de0fd55cb15-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"5e6063f5-43cd-45ec-9ac3-4de0fd55cb15\") " pod="openstack/ovsdbserver-sb-0" Sep 30 10:05:51 crc kubenswrapper[4730]: I0930 10:05:51.703470 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e6063f5-43cd-45ec-9ac3-4de0fd55cb15-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"5e6063f5-43cd-45ec-9ac3-4de0fd55cb15\") " pod="openstack/ovsdbserver-sb-0" Sep 30 10:05:51 crc kubenswrapper[4730]: I0930 10:05:51.703508 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/5e6063f5-43cd-45ec-9ac3-4de0fd55cb15-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"5e6063f5-43cd-45ec-9ac3-4de0fd55cb15\") " pod="openstack/ovsdbserver-sb-0" Sep 30 10:05:51 crc kubenswrapper[4730]: I0930 10:05:51.703536 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"ovsdbserver-sb-0\" (UID: \"5e6063f5-43cd-45ec-9ac3-4de0fd55cb15\") " pod="openstack/ovsdbserver-sb-0" Sep 30 10:05:51 crc kubenswrapper[4730]: I0930 10:05:51.703570 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m6vsp\" (UniqueName: 
\"kubernetes.io/projected/5e6063f5-43cd-45ec-9ac3-4de0fd55cb15-kube-api-access-m6vsp\") pod \"ovsdbserver-sb-0\" (UID: \"5e6063f5-43cd-45ec-9ac3-4de0fd55cb15\") " pod="openstack/ovsdbserver-sb-0" Sep 30 10:05:51 crc kubenswrapper[4730]: I0930 10:05:51.703591 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/5e6063f5-43cd-45ec-9ac3-4de0fd55cb15-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"5e6063f5-43cd-45ec-9ac3-4de0fd55cb15\") " pod="openstack/ovsdbserver-sb-0" Sep 30 10:05:51 crc kubenswrapper[4730]: I0930 10:05:51.705175 4730 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"ovsdbserver-sb-0\" (UID: \"5e6063f5-43cd-45ec-9ac3-4de0fd55cb15\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/ovsdbserver-sb-0" Sep 30 10:05:51 crc kubenswrapper[4730]: I0930 10:05:51.705879 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/5e6063f5-43cd-45ec-9ac3-4de0fd55cb15-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"5e6063f5-43cd-45ec-9ac3-4de0fd55cb15\") " pod="openstack/ovsdbserver-sb-0" Sep 30 10:05:51 crc kubenswrapper[4730]: I0930 10:05:51.707153 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5e6063f5-43cd-45ec-9ac3-4de0fd55cb15-config\") pod \"ovsdbserver-sb-0\" (UID: \"5e6063f5-43cd-45ec-9ac3-4de0fd55cb15\") " pod="openstack/ovsdbserver-sb-0" Sep 30 10:05:51 crc kubenswrapper[4730]: I0930 10:05:51.707928 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5e6063f5-43cd-45ec-9ac3-4de0fd55cb15-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"5e6063f5-43cd-45ec-9ac3-4de0fd55cb15\") " pod="openstack/ovsdbserver-sb-0" Sep 30 10:05:51 crc kubenswrapper[4730]: I0930 10:05:51.714462 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e6063f5-43cd-45ec-9ac3-4de0fd55cb15-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"5e6063f5-43cd-45ec-9ac3-4de0fd55cb15\") " pod="openstack/ovsdbserver-sb-0" Sep 30 10:05:51 crc kubenswrapper[4730]: I0930 10:05:51.720017 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/5e6063f5-43cd-45ec-9ac3-4de0fd55cb15-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"5e6063f5-43cd-45ec-9ac3-4de0fd55cb15\") " pod="openstack/ovsdbserver-sb-0" Sep 30 10:05:51 crc kubenswrapper[4730]: I0930 10:05:51.724280 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/5e6063f5-43cd-45ec-9ac3-4de0fd55cb15-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"5e6063f5-43cd-45ec-9ac3-4de0fd55cb15\") " pod="openstack/ovsdbserver-sb-0" Sep 30 10:05:51 crc kubenswrapper[4730]: I0930 10:05:51.725960 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m6vsp\" (UniqueName: \"kubernetes.io/projected/5e6063f5-43cd-45ec-9ac3-4de0fd55cb15-kube-api-access-m6vsp\") pod \"ovsdbserver-sb-0\" (UID: \"5e6063f5-43cd-45ec-9ac3-4de0fd55cb15\") " pod="openstack/ovsdbserver-sb-0" Sep 30 10:05:51 crc kubenswrapper[4730]: I0930 10:05:51.757290 4730 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"ovsdbserver-sb-0\" (UID: \"5e6063f5-43cd-45ec-9ac3-4de0fd55cb15\") " pod="openstack/ovsdbserver-sb-0" Sep 30 10:05:51 crc kubenswrapper[4730]: I0930 10:05:51.836667 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Sep 30 10:05:52 crc kubenswrapper[4730]: I0930 10:05:52.322066 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7dff86fb65-5r2md" Sep 30 10:05:52 crc kubenswrapper[4730]: I0930 10:05:52.328546 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6c58b4c7b9-pcd2w" Sep 30 10:05:52 crc kubenswrapper[4730]: I0930 10:05:52.417988 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gnhsr\" (UniqueName: \"kubernetes.io/projected/1536e158-ee29-48ad-88d6-9e6d148085e7-kube-api-access-gnhsr\") pod \"1536e158-ee29-48ad-88d6-9e6d148085e7\" (UID: \"1536e158-ee29-48ad-88d6-9e6d148085e7\") " Sep 30 10:05:52 crc kubenswrapper[4730]: I0930 10:05:52.418086 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5c8b83f7-40a2-4215-b77d-828bc2ac8a44-dns-svc\") pod \"5c8b83f7-40a2-4215-b77d-828bc2ac8a44\" (UID: \"5c8b83f7-40a2-4215-b77d-828bc2ac8a44\") " Sep 30 10:05:52 crc kubenswrapper[4730]: I0930 10:05:52.418168 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5c8b83f7-40a2-4215-b77d-828bc2ac8a44-config\") pod \"5c8b83f7-40a2-4215-b77d-828bc2ac8a44\" (UID: \"5c8b83f7-40a2-4215-b77d-828bc2ac8a44\") " Sep 30 10:05:52 crc kubenswrapper[4730]: I0930 10:05:52.418222 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zfxnn\" (UniqueName: \"kubernetes.io/projected/5c8b83f7-40a2-4215-b77d-828bc2ac8a44-kube-api-access-zfxnn\") pod \"5c8b83f7-40a2-4215-b77d-828bc2ac8a44\" (UID: \"5c8b83f7-40a2-4215-b77d-828bc2ac8a44\") " Sep 30 10:05:52 crc kubenswrapper[4730]: I0930 10:05:52.418262 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1536e158-ee29-48ad-88d6-9e6d148085e7-config\") pod \"1536e158-ee29-48ad-88d6-9e6d148085e7\" (UID: \"1536e158-ee29-48ad-88d6-9e6d148085e7\") " Sep 30 10:05:52 crc kubenswrapper[4730]: I0930 10:05:52.419142 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1536e158-ee29-48ad-88d6-9e6d148085e7-config" (OuterVolumeSpecName: "config") pod "1536e158-ee29-48ad-88d6-9e6d148085e7" (UID: "1536e158-ee29-48ad-88d6-9e6d148085e7"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 10:05:52 crc kubenswrapper[4730]: I0930 10:05:52.419175 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5c8b83f7-40a2-4215-b77d-828bc2ac8a44-config" (OuterVolumeSpecName: "config") pod "5c8b83f7-40a2-4215-b77d-828bc2ac8a44" (UID: "5c8b83f7-40a2-4215-b77d-828bc2ac8a44"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 10:05:52 crc kubenswrapper[4730]: I0930 10:05:52.419601 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5c8b83f7-40a2-4215-b77d-828bc2ac8a44-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "5c8b83f7-40a2-4215-b77d-828bc2ac8a44" (UID: "5c8b83f7-40a2-4215-b77d-828bc2ac8a44"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 10:05:52 crc kubenswrapper[4730]: I0930 10:05:52.421995 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1536e158-ee29-48ad-88d6-9e6d148085e7-kube-api-access-gnhsr" (OuterVolumeSpecName: "kube-api-access-gnhsr") pod "1536e158-ee29-48ad-88d6-9e6d148085e7" (UID: "1536e158-ee29-48ad-88d6-9e6d148085e7"). InnerVolumeSpecName "kube-api-access-gnhsr". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:05:52 crc kubenswrapper[4730]: I0930 10:05:52.427290 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5c8b83f7-40a2-4215-b77d-828bc2ac8a44-kube-api-access-zfxnn" (OuterVolumeSpecName: "kube-api-access-zfxnn") pod "5c8b83f7-40a2-4215-b77d-828bc2ac8a44" (UID: "5c8b83f7-40a2-4215-b77d-828bc2ac8a44"). InnerVolumeSpecName "kube-api-access-zfxnn". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:05:52 crc kubenswrapper[4730]: I0930 10:05:52.476121 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Sep 30 10:05:52 crc kubenswrapper[4730]: I0930 10:05:52.487696 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"] Sep 30 10:05:52 crc kubenswrapper[4730]: I0930 10:05:52.501704 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-654794fb59-cf4b6"] Sep 30 10:05:52 crc kubenswrapper[4730]: I0930 10:05:52.511628 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Sep 30 10:05:52 crc kubenswrapper[4730]: I0930 10:05:52.519578 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zfxnn\" (UniqueName: \"kubernetes.io/projected/5c8b83f7-40a2-4215-b77d-828bc2ac8a44-kube-api-access-zfxnn\") on node \"crc\" DevicePath \"\"" Sep 30 10:05:52 crc kubenswrapper[4730]: I0930 10:05:52.519634 4730 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1536e158-ee29-48ad-88d6-9e6d148085e7-config\") on node \"crc\" DevicePath \"\"" Sep 30 10:05:52 crc kubenswrapper[4730]: I0930 10:05:52.519648 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gnhsr\" (UniqueName: \"kubernetes.io/projected/1536e158-ee29-48ad-88d6-9e6d148085e7-kube-api-access-gnhsr\") on node \"crc\" DevicePath \"\"" Sep 30 10:05:52 crc kubenswrapper[4730]: I0930 10:05:52.519660 4730 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5c8b83f7-40a2-4215-b77d-828bc2ac8a44-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 10:05:52 crc kubenswrapper[4730]: I0930 10:05:52.519672 4730 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5c8b83f7-40a2-4215-b77d-828bc2ac8a44-config\") on node \"crc\" DevicePath \"\"" Sep 30 10:05:52 crc kubenswrapper[4730]: I0930 10:05:52.553297 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-4tlvb"] Sep 30 10:05:52 crc 
kubenswrapper[4730]: I0930 10:05:52.579233 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Sep 30 10:05:52 crc kubenswrapper[4730]: I0930 10:05:52.680789 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Sep 30 10:05:52 crc kubenswrapper[4730]: I0930 10:05:52.722389 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7dff86fb65-5r2md" event={"ID":"5c8b83f7-40a2-4215-b77d-828bc2ac8a44","Type":"ContainerDied","Data":"7ab03fe9fad9717129531a7f7644c337161073b155cac983cd864a6d6daa9c72"} Sep 30 10:05:52 crc kubenswrapper[4730]: I0930 10:05:52.725508 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7dff86fb65-5r2md" Sep 30 10:05:52 crc kubenswrapper[4730]: I0930 10:05:52.727638 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-654794fb59-cf4b6" event={"ID":"635201a1-9967-46a0-8561-14d9d70d0c6c","Type":"ContainerStarted","Data":"37a6c925df73fd273a4336da53a6ee4448ec403399d475845154dae4e9c61914"} Sep 30 10:05:52 crc kubenswrapper[4730]: I0930 10:05:52.728750 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"57094072-3915-41c8-a4f8-35960aa068c4","Type":"ContainerStarted","Data":"a0165cb59e49af8f0985cbb559260514f3b1f8868cf3e8d151e84890a69ddc40"} Sep 30 10:05:52 crc kubenswrapper[4730]: I0930 10:05:52.734854 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Sep 30 10:05:52 crc kubenswrapper[4730]: I0930 10:05:52.739028 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-665d695fd9-t78qs"] Sep 30 10:05:52 crc kubenswrapper[4730]: I0930 10:05:52.742441 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"b1cbad29-68e1-42a9-af8f-ea1cfcb774a2","Type":"ContainerStarted","Data":"fc1a13fe9fca3e3222438c2d55d6157e75f25172e865cc42ed30d2d1ee6f6aea"} Sep 30 10:05:52 crc kubenswrapper[4730]: I0930 10:05:52.743273 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"28cd8cfe-8aa0-4ba3-bbfc-a8475c534fa1","Type":"ContainerStarted","Data":"92c499a20a6f2436f87cb090a4c449713fb1c3580dd05e877b081f8c8b794e95"} Sep 30 10:05:52 crc kubenswrapper[4730]: I0930 10:05:52.745511 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6c58b4c7b9-pcd2w" event={"ID":"1536e158-ee29-48ad-88d6-9e6d148085e7","Type":"ContainerDied","Data":"b9467c5c6abc803b051d6d3f490a6f0693ab27679151f7f8300f92614efd2c3f"} Sep 30 10:05:52 crc kubenswrapper[4730]: I0930 10:05:52.745578 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6c58b4c7b9-pcd2w" Sep 30 10:05:52 crc kubenswrapper[4730]: I0930 10:05:52.752175 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"6cf0ebea-06fc-47b2-a2c6-95605e023f94","Type":"ContainerStarted","Data":"35dc2cf71bef2e853e09a15d73b9ba802d9331ebf376bbb6c4e6993a21e146fd"} Sep 30 10:05:52 crc kubenswrapper[4730]: I0930 10:05:52.780510 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-4tlvb" event={"ID":"d247bfe5-48d7-49be-9cd4-2d3368015e3a","Type":"ContainerStarted","Data":"0deadabec062296cb5f06529966fb2db32eed14aa69504bd7cdc8b2da3f3578e"} Sep 30 10:05:52 crc kubenswrapper[4730]: I0930 10:05:52.786622 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-db978b4d7-2wdwq"] Sep 30 10:05:52 crc kubenswrapper[4730]: I0930 10:05:52.807483 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"e1641d17-bb96-477c-ae5f-39f8a1da719c","Type":"ContainerStarted","Data":"f8cf98abb9b07b3646be434e3f653c67360997eb3f71110ee9cc3913fc3f4c7b"} Sep 30 10:05:52 crc kubenswrapper[4730]: I0930 10:05:52.817257 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6c58b4c7b9-pcd2w"] Sep 30 10:05:52 crc kubenswrapper[4730]: I0930 10:05:52.821173 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6c58b4c7b9-pcd2w"] Sep 30 10:05:52 crc kubenswrapper[4730]: I0930 10:05:52.821305 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"c7ad8423-527a-4195-8e63-d04e2bed66c9","Type":"ContainerStarted","Data":"313308be9d029709823929403d4997744adbdd30548f3320fb8ccbe9bbcb2af2"} Sep 30 10:05:52 crc kubenswrapper[4730]: I0930 10:05:52.898300 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7dff86fb65-5r2md"] Sep 30 10:05:52 crc kubenswrapper[4730]: I0930 10:05:52.905093 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7dff86fb65-5r2md"] Sep 30 10:05:52 crc kubenswrapper[4730]: I0930 10:05:52.929395 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Sep 30 10:05:52 crc kubenswrapper[4730]: I0930 10:05:52.971420 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Sep 30 10:05:52 crc kubenswrapper[4730]: W0930 10:05:52.978380 4730 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc1e0179d_6dc3_4dec_8ff6_48c794add5a3.slice/crio-61bdba93c65a21e26e5f81eb4f3f34c1b71bd149c1284695697efca0315ef1b2 WatchSource:0}: Error finding container 61bdba93c65a21e26e5f81eb4f3f34c1b71bd149c1284695697efca0315ef1b2: Status 404 returned error can't find the container with id 61bdba93c65a21e26e5f81eb4f3f34c1b71bd149c1284695697efca0315ef1b2 Sep 30 10:05:52 crc kubenswrapper[4730]: W0930 10:05:52.991218 4730 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd84e120e_743b_4797_aa0a_e231ecfa59ab.slice/crio-25692c3739c89eb012bf1dc24a4f5c821df58e346cb8cedee513e54e113e451f WatchSource:0}: Error finding container 25692c3739c89eb012bf1dc24a4f5c821df58e346cb8cedee513e54e113e451f: Status 404 returned error can't find the container with id 25692c3739c89eb012bf1dc24a4f5c821df58e346cb8cedee513e54e113e451f Sep 30 10:05:53 crc kubenswrapper[4730]: I0930 
10:05:53.054575 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-wb9fw"] Sep 30 10:05:53 crc kubenswrapper[4730]: I0930 10:05:53.829001 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Sep 30 10:05:53 crc kubenswrapper[4730]: I0930 10:05:53.830925 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"d84e120e-743b-4797-aa0a-e231ecfa59ab","Type":"ContainerStarted","Data":"25692c3739c89eb012bf1dc24a4f5c821df58e346cb8cedee513e54e113e451f"} Sep 30 10:05:53 crc kubenswrapper[4730]: I0930 10:05:53.832509 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-665d695fd9-t78qs" event={"ID":"08995951-fead-4afd-a764-3a6963d8c7df","Type":"ContainerStarted","Data":"661ce8a21641a79f6ee2d47999b23bf7575e8b75d77d17c86084ee3c654503c7"} Sep 30 10:05:53 crc kubenswrapper[4730]: I0930 10:05:53.834473 4730 generic.go:334] "Generic (PLEG): container finished" podID="635201a1-9967-46a0-8561-14d9d70d0c6c" containerID="75ec31386f89b9acd7913cb35a9db18a9e811ff2e4ee1a000d8249370abe0799" exitCode=0 Sep 30 10:05:53 crc kubenswrapper[4730]: I0930 10:05:53.834544 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-654794fb59-cf4b6" event={"ID":"635201a1-9967-46a0-8561-14d9d70d0c6c","Type":"ContainerDied","Data":"75ec31386f89b9acd7913cb35a9db18a9e811ff2e4ee1a000d8249370abe0799"} Sep 30 10:05:53 crc kubenswrapper[4730]: I0930 10:05:53.836070 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"c1e0179d-6dc3-4dec-8ff6-48c794add5a3","Type":"ContainerStarted","Data":"61bdba93c65a21e26e5f81eb4f3f34c1b71bd149c1284695697efca0315ef1b2"} Sep 30 10:05:53 crc kubenswrapper[4730]: I0930 10:05:53.838783 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-db978b4d7-2wdwq" event={"ID":"7cec0bcd-db20-40ea-b4d6-f1e1578311b5","Type":"ContainerStarted","Data":"424e9e7cc807720741e1541b994969034867c159c0efaabf453e63479938e9b2"} Sep 30 10:05:54 crc kubenswrapper[4730]: I0930 10:05:54.391951 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1536e158-ee29-48ad-88d6-9e6d148085e7" path="/var/lib/kubelet/pods/1536e158-ee29-48ad-88d6-9e6d148085e7/volumes" Sep 30 10:05:54 crc kubenswrapper[4730]: I0930 10:05:54.393166 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5c8b83f7-40a2-4215-b77d-828bc2ac8a44" path="/var/lib/kubelet/pods/5c8b83f7-40a2-4215-b77d-828bc2ac8a44/volumes" Sep 30 10:05:54 crc kubenswrapper[4730]: W0930 10:05:54.590792 4730 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod43c558ac_76c0_4c01_a265_41320a386add.slice/crio-3f3a0976e103075fe872e80287d32b346ad51eae0a09e27b18adbdede7587310 WatchSource:0}: Error finding container 3f3a0976e103075fe872e80287d32b346ad51eae0a09e27b18adbdede7587310: Status 404 returned error can't find the container with id 3f3a0976e103075fe872e80287d32b346ad51eae0a09e27b18adbdede7587310 Sep 30 10:05:54 crc kubenswrapper[4730]: I0930 10:05:54.851924 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-wb9fw" event={"ID":"43c558ac-76c0-4c01-a265-41320a386add","Type":"ContainerStarted","Data":"3f3a0976e103075fe872e80287d32b346ad51eae0a09e27b18adbdede7587310"} Sep 30 10:05:55 crc kubenswrapper[4730]: W0930 10:05:55.082692 4730 manager.go:1169] Failed to process watch event 
{EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5e6063f5_43cd_45ec_9ac3_4de0fd55cb15.slice/crio-05750e894d1febf5e2dfbc68ed3658458a094755e77e134ffd8b0878195f050b WatchSource:0}: Error finding container 05750e894d1febf5e2dfbc68ed3658458a094755e77e134ffd8b0878195f050b: Status 404 returned error can't find the container with id 05750e894d1febf5e2dfbc68ed3658458a094755e77e134ffd8b0878195f050b Sep 30 10:05:55 crc kubenswrapper[4730]: I0930 10:05:55.862607 4730 generic.go:334] "Generic (PLEG): container finished" podID="7cec0bcd-db20-40ea-b4d6-f1e1578311b5" containerID="332910ff618ac3f4b86e3964a2559f14baf4033494f02a4a2555d6bda26d02b7" exitCode=0 Sep 30 10:05:55 crc kubenswrapper[4730]: I0930 10:05:55.862724 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-db978b4d7-2wdwq" event={"ID":"7cec0bcd-db20-40ea-b4d6-f1e1578311b5","Type":"ContainerDied","Data":"332910ff618ac3f4b86e3964a2559f14baf4033494f02a4a2555d6bda26d02b7"} Sep 30 10:05:55 crc kubenswrapper[4730]: I0930 10:05:55.864159 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"5e6063f5-43cd-45ec-9ac3-4de0fd55cb15","Type":"ContainerStarted","Data":"05750e894d1febf5e2dfbc68ed3658458a094755e77e134ffd8b0878195f050b"} Sep 30 10:05:58 crc kubenswrapper[4730]: I0930 10:05:58.886945 4730 generic.go:334] "Generic (PLEG): container finished" podID="08995951-fead-4afd-a764-3a6963d8c7df" containerID="25aa04c6310f730b0d0bad6e7fe380445d039ad6a36a8f4a7055b4d9161e4ad3" exitCode=0 Sep 30 10:05:58 crc kubenswrapper[4730]: I0930 10:05:58.887063 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-665d695fd9-t78qs" event={"ID":"08995951-fead-4afd-a764-3a6963d8c7df","Type":"ContainerDied","Data":"25aa04c6310f730b0d0bad6e7fe380445d039ad6a36a8f4a7055b4d9161e4ad3"} Sep 30 10:05:59 crc kubenswrapper[4730]: I0930 10:05:59.243011 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-665d695fd9-t78qs" Sep 30 10:05:59 crc kubenswrapper[4730]: I0930 10:05:59.317813 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/08995951-fead-4afd-a764-3a6963d8c7df-config\") pod \"08995951-fead-4afd-a764-3a6963d8c7df\" (UID: \"08995951-fead-4afd-a764-3a6963d8c7df\") " Sep 30 10:05:59 crc kubenswrapper[4730]: I0930 10:05:59.317917 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/08995951-fead-4afd-a764-3a6963d8c7df-dns-svc\") pod \"08995951-fead-4afd-a764-3a6963d8c7df\" (UID: \"08995951-fead-4afd-a764-3a6963d8c7df\") " Sep 30 10:05:59 crc kubenswrapper[4730]: I0930 10:05:59.317951 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6rz6z\" (UniqueName: \"kubernetes.io/projected/08995951-fead-4afd-a764-3a6963d8c7df-kube-api-access-6rz6z\") pod \"08995951-fead-4afd-a764-3a6963d8c7df\" (UID: \"08995951-fead-4afd-a764-3a6963d8c7df\") " Sep 30 10:05:59 crc kubenswrapper[4730]: I0930 10:05:59.324645 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/08995951-fead-4afd-a764-3a6963d8c7df-kube-api-access-6rz6z" (OuterVolumeSpecName: "kube-api-access-6rz6z") pod "08995951-fead-4afd-a764-3a6963d8c7df" (UID: "08995951-fead-4afd-a764-3a6963d8c7df"). InnerVolumeSpecName "kube-api-access-6rz6z". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:05:59 crc kubenswrapper[4730]: I0930 10:05:59.337112 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/08995951-fead-4afd-a764-3a6963d8c7df-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "08995951-fead-4afd-a764-3a6963d8c7df" (UID: "08995951-fead-4afd-a764-3a6963d8c7df"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 10:05:59 crc kubenswrapper[4730]: I0930 10:05:59.337322 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/08995951-fead-4afd-a764-3a6963d8c7df-config" (OuterVolumeSpecName: "config") pod "08995951-fead-4afd-a764-3a6963d8c7df" (UID: "08995951-fead-4afd-a764-3a6963d8c7df"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 10:05:59 crc kubenswrapper[4730]: I0930 10:05:59.420479 4730 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/08995951-fead-4afd-a764-3a6963d8c7df-config\") on node \"crc\" DevicePath \"\"" Sep 30 10:05:59 crc kubenswrapper[4730]: I0930 10:05:59.420517 4730 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/08995951-fead-4afd-a764-3a6963d8c7df-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 10:05:59 crc kubenswrapper[4730]: I0930 10:05:59.420531 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6rz6z\" (UniqueName: \"kubernetes.io/projected/08995951-fead-4afd-a764-3a6963d8c7df-kube-api-access-6rz6z\") on node \"crc\" DevicePath \"\"" Sep 30 10:05:59 crc kubenswrapper[4730]: I0930 10:05:59.896695 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-665d695fd9-t78qs" event={"ID":"08995951-fead-4afd-a764-3a6963d8c7df","Type":"ContainerDied","Data":"661ce8a21641a79f6ee2d47999b23bf7575e8b75d77d17c86084ee3c654503c7"} Sep 30 10:05:59 crc kubenswrapper[4730]: I0930 10:05:59.897096 4730 scope.go:117] "RemoveContainer" containerID="25aa04c6310f730b0d0bad6e7fe380445d039ad6a36a8f4a7055b4d9161e4ad3" Sep 30 10:05:59 crc kubenswrapper[4730]: I0930 10:05:59.896736 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-665d695fd9-t78qs" Sep 30 10:05:59 crc kubenswrapper[4730]: I0930 10:05:59.902226 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-654794fb59-cf4b6" event={"ID":"635201a1-9967-46a0-8561-14d9d70d0c6c","Type":"ContainerStarted","Data":"cc4d6d696f0b2c2424e2d83548632b3c85eb39e49fc7fb669932f7467ec3909e"} Sep 30 10:05:59 crc kubenswrapper[4730]: I0930 10:05:59.902311 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-654794fb59-cf4b6" Sep 30 10:05:59 crc kubenswrapper[4730]: I0930 10:05:59.905453 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-db978b4d7-2wdwq" event={"ID":"7cec0bcd-db20-40ea-b4d6-f1e1578311b5","Type":"ContainerStarted","Data":"6e72d1708a9e0c3a2a49462c6a6637d642566b3c3a31e419d81fd8f072eda0e7"} Sep 30 10:05:59 crc kubenswrapper[4730]: I0930 10:05:59.905906 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-db978b4d7-2wdwq" Sep 30 10:05:59 crc kubenswrapper[4730]: I0930 10:05:59.931401 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-654794fb59-cf4b6" podStartSLOduration=22.791223493 podStartE2EDuration="22.93138582s" podCreationTimestamp="2025-09-30 10:05:37 +0000 UTC" firstStartedPulling="2025-09-30 10:05:52.721438162 +0000 UTC m=+997.054698155" lastFinishedPulling="2025-09-30 10:05:52.861600489 +0000 UTC m=+997.194860482" observedRunningTime="2025-09-30 10:05:59.921346124 +0000 UTC m=+1004.254606137" watchObservedRunningTime="2025-09-30 10:05:59.93138582 +0000 UTC m=+1004.264645803" Sep 30 10:05:59 crc kubenswrapper[4730]: I0930 10:05:59.944219 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-db978b4d7-2wdwq" podStartSLOduration=22.94420144 podStartE2EDuration="22.94420144s" podCreationTimestamp="2025-09-30 10:05:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 10:05:59.939439504 +0000 UTC m=+1004.272699497" watchObservedRunningTime="2025-09-30 10:05:59.94420144 +0000 UTC m=+1004.277461433" Sep 30 10:06:00 crc kubenswrapper[4730]: I0930 10:06:00.052186 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-665d695fd9-t78qs"] Sep 30 10:06:00 crc kubenswrapper[4730]: I0930 10:06:00.063971 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-665d695fd9-t78qs"] Sep 30 10:06:00 crc kubenswrapper[4730]: I0930 10:06:00.393193 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="08995951-fead-4afd-a764-3a6963d8c7df" path="/var/lib/kubelet/pods/08995951-fead-4afd-a764-3a6963d8c7df/volumes" Sep 30 10:06:00 crc kubenswrapper[4730]: I0930 10:06:00.914315 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"28cd8cfe-8aa0-4ba3-bbfc-a8475c534fa1","Type":"ContainerStarted","Data":"d4c6e286daf85cd03ede4f607e40f298bc54d086398c6308db7965122ae3a266"} Sep 30 10:06:00 crc kubenswrapper[4730]: I0930 10:06:00.928487 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"c1e0179d-6dc3-4dec-8ff6-48c794add5a3","Type":"ContainerStarted","Data":"764c4a5757829a281fb390a059ff2fe79e1f60a6f12d568a46caa9d442dc3493"} Sep 30 10:06:00 crc kubenswrapper[4730]: I0930 10:06:00.931325 4730 kubelet.go:2453] "SyncLoop (PLEG): event for 
pod" pod="openstack/rabbitmq-notifications-server-0" event={"ID":"7ba6b518-edfa-4d19-b096-03d7d96c51a3","Type":"ContainerStarted","Data":"fd068bf37f46b20a26b96b74ca949fe1204bb5f3453145a5f8f166a1b8c48c52"} Sep 30 10:06:00 crc kubenswrapper[4730]: I0930 10:06:00.952854 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-4tlvb" event={"ID":"d247bfe5-48d7-49be-9cd4-2d3368015e3a","Type":"ContainerStarted","Data":"8aa1c82c39999b253406bb6f511e0df4113b34d67217e120be51d35121a4d5b8"} Sep 30 10:06:00 crc kubenswrapper[4730]: I0930 10:06:00.953843 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-4tlvb" Sep 30 10:06:00 crc kubenswrapper[4730]: I0930 10:06:00.959164 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"c7ad8423-527a-4195-8e63-d04e2bed66c9","Type":"ContainerStarted","Data":"6d7bb8ae09a66468e088426b0963ccc1559d7c126d2c7da1900d7b7829e89205"} Sep 30 10:06:00 crc kubenswrapper[4730]: I0930 10:06:00.959219 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/memcached-0" Sep 30 10:06:00 crc kubenswrapper[4730]: I0930 10:06:00.959991 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"b1cbad29-68e1-42a9-af8f-ea1cfcb774a2","Type":"ContainerStarted","Data":"8dafeeba9b157ce2800566bddda816b3d119847d03a7d6c84aff95e1c288798c"} Sep 30 10:06:01 crc kubenswrapper[4730]: I0930 10:06:01.010846 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=13.509045307 podStartE2EDuration="19.010830176s" podCreationTimestamp="2025-09-30 10:05:42 +0000 UTC" firstStartedPulling="2025-09-30 10:05:52.713390059 +0000 UTC m=+997.046650052" lastFinishedPulling="2025-09-30 10:05:58.215174938 +0000 UTC m=+1002.548434921" observedRunningTime="2025-09-30 10:06:01.007908488 +0000 UTC m=+1005.341168481" watchObservedRunningTime="2025-09-30 10:06:01.010830176 +0000 UTC m=+1005.344090169" Sep 30 10:06:01 crc kubenswrapper[4730]: I0930 10:06:01.033265 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-4tlvb" podStartSLOduration=8.000442258 podStartE2EDuration="14.03324317s" podCreationTimestamp="2025-09-30 10:05:47 +0000 UTC" firstStartedPulling="2025-09-30 10:05:52.683052124 +0000 UTC m=+997.016312107" lastFinishedPulling="2025-09-30 10:05:58.715853016 +0000 UTC m=+1003.049113019" observedRunningTime="2025-09-30 10:06:01.022740281 +0000 UTC m=+1005.356000274" watchObservedRunningTime="2025-09-30 10:06:01.03324317 +0000 UTC m=+1005.366503163" Sep 30 10:06:01 crc kubenswrapper[4730]: I0930 10:06:01.971930 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"d84e120e-743b-4797-aa0a-e231ecfa59ab","Type":"ContainerStarted","Data":"701e2e4d8528f5ad84e1cc190b24c3ff5a01ddda76fa5d2378c32bdc2a2591ad"} Sep 30 10:06:01 crc kubenswrapper[4730]: I0930 10:06:01.976395 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"5e6063f5-43cd-45ec-9ac3-4de0fd55cb15","Type":"ContainerStarted","Data":"ecf6063376e7f838b42392447e6a180b5de9af9840e344359f3e08975c426f23"} Sep 30 10:06:01 crc kubenswrapper[4730]: I0930 10:06:01.979312 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" 
event={"ID":"6cf0ebea-06fc-47b2-a2c6-95605e023f94","Type":"ContainerStarted","Data":"587710b1ff8f9bce76f7bf39a64d961cf638fbe04d60ee8225504cae9015fcfb"} Sep 30 10:06:01 crc kubenswrapper[4730]: I0930 10:06:01.986836 4730 generic.go:334] "Generic (PLEG): container finished" podID="43c558ac-76c0-4c01-a265-41320a386add" containerID="5abb9ecf83ed6dbf9ccc476d9787fa351ba8e8e60cd41aefac75a514921e31f5" exitCode=0 Sep 30 10:06:01 crc kubenswrapper[4730]: I0930 10:06:01.986920 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-wb9fw" event={"ID":"43c558ac-76c0-4c01-a265-41320a386add","Type":"ContainerDied","Data":"5abb9ecf83ed6dbf9ccc476d9787fa351ba8e8e60cd41aefac75a514921e31f5"} Sep 30 10:06:01 crc kubenswrapper[4730]: I0930 10:06:01.989666 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"e1641d17-bb96-477c-ae5f-39f8a1da719c","Type":"ContainerStarted","Data":"9606eb7ce234a02673a3c68bea28caa4fc124f2b2960f22147b0ecec38d2efa8"} Sep 30 10:06:02 crc kubenswrapper[4730]: I0930 10:06:02.030095 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=10.976134294 podStartE2EDuration="19.030078355s" podCreationTimestamp="2025-09-30 10:05:43 +0000 UTC" firstStartedPulling="2025-09-30 10:05:52.637141986 +0000 UTC m=+996.970401979" lastFinishedPulling="2025-09-30 10:06:00.691086007 +0000 UTC m=+1005.024346040" observedRunningTime="2025-09-30 10:06:02.027428564 +0000 UTC m=+1006.360688557" watchObservedRunningTime="2025-09-30 10:06:02.030078355 +0000 UTC m=+1006.363338348" Sep 30 10:06:03 crc kubenswrapper[4730]: I0930 10:06:03.008948 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"57094072-3915-41c8-a4f8-35960aa068c4","Type":"ContainerStarted","Data":"0c0f6b2933de5606710159b77ef3a6f54c31f39cc388a86d17f05d1fc154d3a7"} Sep 30 10:06:03 crc kubenswrapper[4730]: I0930 10:06:03.012529 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-wb9fw" event={"ID":"43c558ac-76c0-4c01-a265-41320a386add","Type":"ContainerStarted","Data":"ca2fee179c06f66ff3e859a32574e8d30c851f63fcbd449e8d2d16c5becefcc9"} Sep 30 10:06:03 crc kubenswrapper[4730]: I0930 10:06:03.012558 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-wb9fw" event={"ID":"43c558ac-76c0-4c01-a265-41320a386add","Type":"ContainerStarted","Data":"bce975432620a1ad2719a9949dbedc941fcfd8f6cf7929f01016b688bbe0327d"} Sep 30 10:06:03 crc kubenswrapper[4730]: I0930 10:06:03.012571 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Sep 30 10:06:03 crc kubenswrapper[4730]: I0930 10:06:03.012583 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-wb9fw" Sep 30 10:06:03 crc kubenswrapper[4730]: I0930 10:06:03.012592 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-wb9fw" Sep 30 10:06:03 crc kubenswrapper[4730]: I0930 10:06:03.064777 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-ovs-wb9fw" podStartSLOduration=11.939850476 podStartE2EDuration="16.064758953s" podCreationTimestamp="2025-09-30 10:05:47 +0000 UTC" firstStartedPulling="2025-09-30 10:05:54.592948432 +0000 UTC m=+998.926208425" lastFinishedPulling="2025-09-30 10:05:58.717856909 +0000 UTC m=+1003.051116902" 
observedRunningTime="2025-09-30 10:06:03.062663467 +0000 UTC m=+1007.395923480" watchObservedRunningTime="2025-09-30 10:06:03.064758953 +0000 UTC m=+1007.398018946" Sep 30 10:06:07 crc kubenswrapper[4730]: I0930 10:06:07.714009 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0" Sep 30 10:06:07 crc kubenswrapper[4730]: I0930 10:06:07.935806 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-db978b4d7-2wdwq" Sep 30 10:06:08 crc kubenswrapper[4730]: I0930 10:06:08.223212 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-654794fb59-cf4b6" Sep 30 10:06:08 crc kubenswrapper[4730]: I0930 10:06:08.293875 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-db978b4d7-2wdwq"] Sep 30 10:06:08 crc kubenswrapper[4730]: I0930 10:06:08.294127 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-db978b4d7-2wdwq" podUID="7cec0bcd-db20-40ea-b4d6-f1e1578311b5" containerName="dnsmasq-dns" containerID="cri-o://6e72d1708a9e0c3a2a49462c6a6637d642566b3c3a31e419d81fd8f072eda0e7" gracePeriod=10 Sep 30 10:06:09 crc kubenswrapper[4730]: I0930 10:06:09.060317 4730 generic.go:334] "Generic (PLEG): container finished" podID="7cec0bcd-db20-40ea-b4d6-f1e1578311b5" containerID="6e72d1708a9e0c3a2a49462c6a6637d642566b3c3a31e419d81fd8f072eda0e7" exitCode=0 Sep 30 10:06:09 crc kubenswrapper[4730]: I0930 10:06:09.060393 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-db978b4d7-2wdwq" event={"ID":"7cec0bcd-db20-40ea-b4d6-f1e1578311b5","Type":"ContainerDied","Data":"6e72d1708a9e0c3a2a49462c6a6637d642566b3c3a31e419d81fd8f072eda0e7"} Sep 30 10:06:10 crc kubenswrapper[4730]: I0930 10:06:10.073936 4730 generic.go:334] "Generic (PLEG): container finished" podID="57094072-3915-41c8-a4f8-35960aa068c4" containerID="0c0f6b2933de5606710159b77ef3a6f54c31f39cc388a86d17f05d1fc154d3a7" exitCode=0 Sep 30 10:06:10 crc kubenswrapper[4730]: I0930 10:06:10.074021 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"57094072-3915-41c8-a4f8-35960aa068c4","Type":"ContainerDied","Data":"0c0f6b2933de5606710159b77ef3a6f54c31f39cc388a86d17f05d1fc154d3a7"} Sep 30 10:06:11 crc kubenswrapper[4730]: I0930 10:06:11.102180 4730 generic.go:334] "Generic (PLEG): container finished" podID="28cd8cfe-8aa0-4ba3-bbfc-a8475c534fa1" containerID="d4c6e286daf85cd03ede4f607e40f298bc54d086398c6308db7965122ae3a266" exitCode=0 Sep 30 10:06:11 crc kubenswrapper[4730]: I0930 10:06:11.102774 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"28cd8cfe-8aa0-4ba3-bbfc-a8475c534fa1","Type":"ContainerDied","Data":"d4c6e286daf85cd03ede4f607e40f298bc54d086398c6308db7965122ae3a266"} Sep 30 10:06:11 crc kubenswrapper[4730]: I0930 10:06:11.110918 4730 generic.go:334] "Generic (PLEG): container finished" podID="c1e0179d-6dc3-4dec-8ff6-48c794add5a3" containerID="764c4a5757829a281fb390a059ff2fe79e1f60a6f12d568a46caa9d442dc3493" exitCode=0 Sep 30 10:06:11 crc kubenswrapper[4730]: I0930 10:06:11.111308 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"c1e0179d-6dc3-4dec-8ff6-48c794add5a3","Type":"ContainerDied","Data":"764c4a5757829a281fb390a059ff2fe79e1f60a6f12d568a46caa9d442dc3493"} Sep 30 10:06:11 crc kubenswrapper[4730]: I0930 10:06:11.440953 4730 util.go:48] "No 
ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-db978b4d7-2wdwq" Sep 30 10:06:11 crc kubenswrapper[4730]: I0930 10:06:11.454181 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7cec0bcd-db20-40ea-b4d6-f1e1578311b5-config\") pod \"7cec0bcd-db20-40ea-b4d6-f1e1578311b5\" (UID: \"7cec0bcd-db20-40ea-b4d6-f1e1578311b5\") " Sep 30 10:06:11 crc kubenswrapper[4730]: I0930 10:06:11.454252 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5v47r\" (UniqueName: \"kubernetes.io/projected/7cec0bcd-db20-40ea-b4d6-f1e1578311b5-kube-api-access-5v47r\") pod \"7cec0bcd-db20-40ea-b4d6-f1e1578311b5\" (UID: \"7cec0bcd-db20-40ea-b4d6-f1e1578311b5\") " Sep 30 10:06:11 crc kubenswrapper[4730]: I0930 10:06:11.454288 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7cec0bcd-db20-40ea-b4d6-f1e1578311b5-dns-svc\") pod \"7cec0bcd-db20-40ea-b4d6-f1e1578311b5\" (UID: \"7cec0bcd-db20-40ea-b4d6-f1e1578311b5\") " Sep 30 10:06:11 crc kubenswrapper[4730]: I0930 10:06:11.461191 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7cec0bcd-db20-40ea-b4d6-f1e1578311b5-kube-api-access-5v47r" (OuterVolumeSpecName: "kube-api-access-5v47r") pod "7cec0bcd-db20-40ea-b4d6-f1e1578311b5" (UID: "7cec0bcd-db20-40ea-b4d6-f1e1578311b5"). InnerVolumeSpecName "kube-api-access-5v47r". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:06:11 crc kubenswrapper[4730]: I0930 10:06:11.536329 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7cec0bcd-db20-40ea-b4d6-f1e1578311b5-config" (OuterVolumeSpecName: "config") pod "7cec0bcd-db20-40ea-b4d6-f1e1578311b5" (UID: "7cec0bcd-db20-40ea-b4d6-f1e1578311b5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 10:06:11 crc kubenswrapper[4730]: I0930 10:06:11.549260 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7cec0bcd-db20-40ea-b4d6-f1e1578311b5-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "7cec0bcd-db20-40ea-b4d6-f1e1578311b5" (UID: "7cec0bcd-db20-40ea-b4d6-f1e1578311b5"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 10:06:11 crc kubenswrapper[4730]: I0930 10:06:11.556470 4730 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7cec0bcd-db20-40ea-b4d6-f1e1578311b5-config\") on node \"crc\" DevicePath \"\"" Sep 30 10:06:11 crc kubenswrapper[4730]: I0930 10:06:11.556498 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5v47r\" (UniqueName: \"kubernetes.io/projected/7cec0bcd-db20-40ea-b4d6-f1e1578311b5-kube-api-access-5v47r\") on node \"crc\" DevicePath \"\"" Sep 30 10:06:11 crc kubenswrapper[4730]: I0930 10:06:11.556510 4730 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7cec0bcd-db20-40ea-b4d6-f1e1578311b5-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 10:06:12 crc kubenswrapper[4730]: I0930 10:06:12.121235 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"28cd8cfe-8aa0-4ba3-bbfc-a8475c534fa1","Type":"ContainerStarted","Data":"0c3d0e959dcf5f59c47c8d1ec985fe4facea66a26a54e88995aa9cc65acc778a"} Sep 30 10:06:12 crc kubenswrapper[4730]: I0930 10:06:12.124422 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"c1e0179d-6dc3-4dec-8ff6-48c794add5a3","Type":"ContainerStarted","Data":"55617cf360cd39f6897a4eb7141e77ac8a84acff826909b05ea3b1b762568f68"} Sep 30 10:06:12 crc kubenswrapper[4730]: I0930 10:06:12.126407 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-db978b4d7-2wdwq" event={"ID":"7cec0bcd-db20-40ea-b4d6-f1e1578311b5","Type":"ContainerDied","Data":"424e9e7cc807720741e1541b994969034867c159c0efaabf453e63479938e9b2"} Sep 30 10:06:12 crc kubenswrapper[4730]: I0930 10:06:12.126442 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-db978b4d7-2wdwq" Sep 30 10:06:12 crc kubenswrapper[4730]: I0930 10:06:12.126453 4730 scope.go:117] "RemoveContainer" containerID="6e72d1708a9e0c3a2a49462c6a6637d642566b3c3a31e419d81fd8f072eda0e7" Sep 30 10:06:12 crc kubenswrapper[4730]: I0930 10:06:12.128478 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"d84e120e-743b-4797-aa0a-e231ecfa59ab","Type":"ContainerStarted","Data":"da4ce60762d5c8866c7536b2c45c3e9858a18ecda0914044fafc58d36285042e"} Sep 30 10:06:12 crc kubenswrapper[4730]: I0930 10:06:12.130658 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"5e6063f5-43cd-45ec-9ac3-4de0fd55cb15","Type":"ContainerStarted","Data":"d84c885a5290dbf01a3aa5fd522d40a390d50351a9fe2560ca186cfbd0374d6f"} Sep 30 10:06:12 crc kubenswrapper[4730]: I0930 10:06:12.152075 4730 scope.go:117] "RemoveContainer" containerID="332910ff618ac3f4b86e3964a2559f14baf4033494f02a4a2555d6bda26d02b7" Sep 30 10:06:12 crc kubenswrapper[4730]: I0930 10:06:12.162483 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=26.160423877 podStartE2EDuration="32.162470233s" podCreationTimestamp="2025-09-30 10:05:40 +0000 UTC" firstStartedPulling="2025-09-30 10:05:52.71381288 +0000 UTC m=+997.047072873" lastFinishedPulling="2025-09-30 10:05:58.715859196 +0000 UTC m=+1003.049119229" observedRunningTime="2025-09-30 10:06:12.155495328 +0000 UTC m=+1016.488755331" watchObservedRunningTime="2025-09-30 10:06:12.162470233 +0000 UTC m=+1016.495730226" Sep 30 10:06:12 crc kubenswrapper[4730]: I0930 10:06:12.177459 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=26.479297272 podStartE2EDuration="32.177440289s" podCreationTimestamp="2025-09-30 10:05:40 +0000 UTC" firstStartedPulling="2025-09-30 10:05:53.018118679 +0000 UTC m=+997.351378662" lastFinishedPulling="2025-09-30 10:05:58.716261676 +0000 UTC m=+1003.049521679" observedRunningTime="2025-09-30 10:06:12.176285628 +0000 UTC m=+1016.509545641" watchObservedRunningTime="2025-09-30 10:06:12.177440289 +0000 UTC m=+1016.510700282" Sep 30 10:06:12 crc kubenswrapper[4730]: I0930 10:06:12.200022 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-db978b4d7-2wdwq"] Sep 30 10:06:12 crc kubenswrapper[4730]: I0930 10:06:12.206641 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0" Sep 30 10:06:12 crc kubenswrapper[4730]: I0930 10:06:12.207378 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0" Sep 30 10:06:12 crc kubenswrapper[4730]: I0930 10:06:12.211693 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-db978b4d7-2wdwq"] Sep 30 10:06:12 crc kubenswrapper[4730]: I0930 10:06:12.215447 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" podStartSLOduration=5.8754186619999995 podStartE2EDuration="22.215430557s" podCreationTimestamp="2025-09-30 10:05:50 +0000 UTC" firstStartedPulling="2025-09-30 10:05:55.100920743 +0000 UTC m=+999.434180736" lastFinishedPulling="2025-09-30 10:06:11.440932638 +0000 UTC m=+1015.774192631" observedRunningTime="2025-09-30 10:06:12.207257541 +0000 UTC m=+1016.540517534" watchObservedRunningTime="2025-09-30 10:06:12.215430557 +0000 UTC 
m=+1016.548690550" Sep 30 10:06:12 crc kubenswrapper[4730]: I0930 10:06:12.233196 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=6.892347703 podStartE2EDuration="25.233178147s" podCreationTimestamp="2025-09-30 10:05:47 +0000 UTC" firstStartedPulling="2025-09-30 10:05:53.049998515 +0000 UTC m=+997.383258518" lastFinishedPulling="2025-09-30 10:06:11.390828969 +0000 UTC m=+1015.724088962" observedRunningTime="2025-09-30 10:06:12.229813459 +0000 UTC m=+1016.563073472" watchObservedRunningTime="2025-09-30 10:06:12.233178147 +0000 UTC m=+1016.566438150" Sep 30 10:06:12 crc kubenswrapper[4730]: I0930 10:06:12.318544 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0" Sep 30 10:06:12 crc kubenswrapper[4730]: I0930 10:06:12.318576 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0" Sep 30 10:06:12 crc kubenswrapper[4730]: I0930 10:06:12.392405 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7cec0bcd-db20-40ea-b4d6-f1e1578311b5" path="/var/lib/kubelet/pods/7cec0bcd-db20-40ea-b4d6-f1e1578311b5/volumes" Sep 30 10:06:12 crc kubenswrapper[4730]: I0930 10:06:12.561213 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0" Sep 30 10:06:12 crc kubenswrapper[4730]: I0930 10:06:12.598409 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0" Sep 30 10:06:12 crc kubenswrapper[4730]: I0930 10:06:12.838017 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0" Sep 30 10:06:12 crc kubenswrapper[4730]: I0930 10:06:12.879585 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0" Sep 30 10:06:13 crc kubenswrapper[4730]: I0930 10:06:13.139135 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0" Sep 30 10:06:13 crc kubenswrapper[4730]: I0930 10:06:13.139322 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0" Sep 30 10:06:13 crc kubenswrapper[4730]: I0930 10:06:13.192192 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0" Sep 30 10:06:13 crc kubenswrapper[4730]: I0930 10:06:13.198411 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0" Sep 30 10:06:13 crc kubenswrapper[4730]: I0930 10:06:13.440904 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-56bc8bbcf-qjwfg"] Sep 30 10:06:13 crc kubenswrapper[4730]: E0930 10:06:13.441556 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="08995951-fead-4afd-a764-3a6963d8c7df" containerName="init" Sep 30 10:06:13 crc kubenswrapper[4730]: I0930 10:06:13.441575 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="08995951-fead-4afd-a764-3a6963d8c7df" containerName="init" Sep 30 10:06:13 crc kubenswrapper[4730]: E0930 10:06:13.441619 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7cec0bcd-db20-40ea-b4d6-f1e1578311b5" containerName="init" Sep 30 10:06:13 crc kubenswrapper[4730]: I0930 10:06:13.441626 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="7cec0bcd-db20-40ea-b4d6-f1e1578311b5" containerName="init" Sep 30 10:06:13 crc kubenswrapper[4730]: E0930 
10:06:13.441636 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7cec0bcd-db20-40ea-b4d6-f1e1578311b5" containerName="dnsmasq-dns" Sep 30 10:06:13 crc kubenswrapper[4730]: I0930 10:06:13.441642 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="7cec0bcd-db20-40ea-b4d6-f1e1578311b5" containerName="dnsmasq-dns" Sep 30 10:06:13 crc kubenswrapper[4730]: I0930 10:06:13.441778 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="08995951-fead-4afd-a764-3a6963d8c7df" containerName="init" Sep 30 10:06:13 crc kubenswrapper[4730]: I0930 10:06:13.441800 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="7cec0bcd-db20-40ea-b4d6-f1e1578311b5" containerName="dnsmasq-dns" Sep 30 10:06:13 crc kubenswrapper[4730]: I0930 10:06:13.445881 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-56bc8bbcf-qjwfg" Sep 30 10:06:13 crc kubenswrapper[4730]: I0930 10:06:13.448390 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb" Sep 30 10:06:13 crc kubenswrapper[4730]: I0930 10:06:13.459153 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-56bc8bbcf-qjwfg"] Sep 30 10:06:13 crc kubenswrapper[4730]: I0930 10:06:13.508901 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1abbe1e6-e825-4954-9384-ea5de2e62808-config\") pod \"dnsmasq-dns-56bc8bbcf-qjwfg\" (UID: \"1abbe1e6-e825-4954-9384-ea5de2e62808\") " pod="openstack/dnsmasq-dns-56bc8bbcf-qjwfg" Sep 30 10:06:13 crc kubenswrapper[4730]: I0930 10:06:13.509020 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mf5jl\" (UniqueName: \"kubernetes.io/projected/1abbe1e6-e825-4954-9384-ea5de2e62808-kube-api-access-mf5jl\") pod \"dnsmasq-dns-56bc8bbcf-qjwfg\" (UID: \"1abbe1e6-e825-4954-9384-ea5de2e62808\") " pod="openstack/dnsmasq-dns-56bc8bbcf-qjwfg" Sep 30 10:06:13 crc kubenswrapper[4730]: I0930 10:06:13.509045 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1abbe1e6-e825-4954-9384-ea5de2e62808-ovsdbserver-sb\") pod \"dnsmasq-dns-56bc8bbcf-qjwfg\" (UID: \"1abbe1e6-e825-4954-9384-ea5de2e62808\") " pod="openstack/dnsmasq-dns-56bc8bbcf-qjwfg" Sep 30 10:06:13 crc kubenswrapper[4730]: I0930 10:06:13.509071 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1abbe1e6-e825-4954-9384-ea5de2e62808-dns-svc\") pod \"dnsmasq-dns-56bc8bbcf-qjwfg\" (UID: \"1abbe1e6-e825-4954-9384-ea5de2e62808\") " pod="openstack/dnsmasq-dns-56bc8bbcf-qjwfg" Sep 30 10:06:13 crc kubenswrapper[4730]: I0930 10:06:13.514148 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-metrics-zh8k5"] Sep 30 10:06:13 crc kubenswrapper[4730]: I0930 10:06:13.517994 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-metrics-zh8k5" Sep 30 10:06:13 crc kubenswrapper[4730]: I0930 10:06:13.521214 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-metrics-config" Sep 30 10:06:13 crc kubenswrapper[4730]: I0930 10:06:13.539070 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-zh8k5"] Sep 30 10:06:13 crc kubenswrapper[4730]: I0930 10:06:13.597819 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-0"] Sep 30 10:06:13 crc kubenswrapper[4730]: I0930 10:06:13.599587 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0" Sep 30 10:06:13 crc kubenswrapper[4730]: I0930 10:06:13.613806 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-2m545" Sep 30 10:06:13 crc kubenswrapper[4730]: I0930 10:06:13.613983 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovnnorthd-ovndbs" Sep 30 10:06:13 crc kubenswrapper[4730]: I0930 10:06:13.613997 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts" Sep 30 10:06:13 crc kubenswrapper[4730]: I0930 10:06:13.614295 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config" Sep 30 10:06:13 crc kubenswrapper[4730]: I0930 10:06:13.615049 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d2z45\" (UniqueName: \"kubernetes.io/projected/f609f208-0734-4364-b05e-0364bded655e-kube-api-access-d2z45\") pod \"ovn-controller-metrics-zh8k5\" (UID: \"f609f208-0734-4364-b05e-0364bded655e\") " pod="openstack/ovn-controller-metrics-zh8k5" Sep 30 10:06:13 crc kubenswrapper[4730]: I0930 10:06:13.615108 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mf5jl\" (UniqueName: \"kubernetes.io/projected/1abbe1e6-e825-4954-9384-ea5de2e62808-kube-api-access-mf5jl\") pod \"dnsmasq-dns-56bc8bbcf-qjwfg\" (UID: \"1abbe1e6-e825-4954-9384-ea5de2e62808\") " pod="openstack/dnsmasq-dns-56bc8bbcf-qjwfg" Sep 30 10:06:13 crc kubenswrapper[4730]: I0930 10:06:13.615149 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1abbe1e6-e825-4954-9384-ea5de2e62808-ovsdbserver-sb\") pod \"dnsmasq-dns-56bc8bbcf-qjwfg\" (UID: \"1abbe1e6-e825-4954-9384-ea5de2e62808\") " pod="openstack/dnsmasq-dns-56bc8bbcf-qjwfg" Sep 30 10:06:13 crc kubenswrapper[4730]: I0930 10:06:13.615195 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1abbe1e6-e825-4954-9384-ea5de2e62808-dns-svc\") pod \"dnsmasq-dns-56bc8bbcf-qjwfg\" (UID: \"1abbe1e6-e825-4954-9384-ea5de2e62808\") " pod="openstack/dnsmasq-dns-56bc8bbcf-qjwfg" Sep 30 10:06:13 crc kubenswrapper[4730]: I0930 10:06:13.615220 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/f609f208-0734-4364-b05e-0364bded655e-ovn-rundir\") pod \"ovn-controller-metrics-zh8k5\" (UID: \"f609f208-0734-4364-b05e-0364bded655e\") " pod="openstack/ovn-controller-metrics-zh8k5" Sep 30 10:06:13 crc kubenswrapper[4730]: I0930 10:06:13.615242 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/f609f208-0734-4364-b05e-0364bded655e-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-zh8k5\" (UID: \"f609f208-0734-4364-b05e-0364bded655e\") " pod="openstack/ovn-controller-metrics-zh8k5" Sep 30 10:06:13 crc kubenswrapper[4730]: I0930 10:06:13.615273 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f609f208-0734-4364-b05e-0364bded655e-combined-ca-bundle\") pod \"ovn-controller-metrics-zh8k5\" (UID: \"f609f208-0734-4364-b05e-0364bded655e\") " pod="openstack/ovn-controller-metrics-zh8k5" Sep 30 10:06:13 crc kubenswrapper[4730]: I0930 10:06:13.615322 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1abbe1e6-e825-4954-9384-ea5de2e62808-config\") pod \"dnsmasq-dns-56bc8bbcf-qjwfg\" (UID: \"1abbe1e6-e825-4954-9384-ea5de2e62808\") " pod="openstack/dnsmasq-dns-56bc8bbcf-qjwfg" Sep 30 10:06:13 crc kubenswrapper[4730]: I0930 10:06:13.615473 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/f609f208-0734-4364-b05e-0364bded655e-ovs-rundir\") pod \"ovn-controller-metrics-zh8k5\" (UID: \"f609f208-0734-4364-b05e-0364bded655e\") " pod="openstack/ovn-controller-metrics-zh8k5" Sep 30 10:06:13 crc kubenswrapper[4730]: I0930 10:06:13.615557 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f609f208-0734-4364-b05e-0364bded655e-config\") pod \"ovn-controller-metrics-zh8k5\" (UID: \"f609f208-0734-4364-b05e-0364bded655e\") " pod="openstack/ovn-controller-metrics-zh8k5" Sep 30 10:06:13 crc kubenswrapper[4730]: I0930 10:06:13.616169 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1abbe1e6-e825-4954-9384-ea5de2e62808-dns-svc\") pod \"dnsmasq-dns-56bc8bbcf-qjwfg\" (UID: \"1abbe1e6-e825-4954-9384-ea5de2e62808\") " pod="openstack/dnsmasq-dns-56bc8bbcf-qjwfg" Sep 30 10:06:13 crc kubenswrapper[4730]: I0930 10:06:13.618414 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Sep 30 10:06:13 crc kubenswrapper[4730]: I0930 10:06:13.622397 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1abbe1e6-e825-4954-9384-ea5de2e62808-ovsdbserver-sb\") pod \"dnsmasq-dns-56bc8bbcf-qjwfg\" (UID: \"1abbe1e6-e825-4954-9384-ea5de2e62808\") " pod="openstack/dnsmasq-dns-56bc8bbcf-qjwfg" Sep 30 10:06:13 crc kubenswrapper[4730]: I0930 10:06:13.626265 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1abbe1e6-e825-4954-9384-ea5de2e62808-config\") pod \"dnsmasq-dns-56bc8bbcf-qjwfg\" (UID: \"1abbe1e6-e825-4954-9384-ea5de2e62808\") " pod="openstack/dnsmasq-dns-56bc8bbcf-qjwfg" Sep 30 10:06:13 crc kubenswrapper[4730]: I0930 10:06:13.631819 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-56bc8bbcf-qjwfg"] Sep 30 10:06:13 crc kubenswrapper[4730]: E0930 10:06:13.632373 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[kube-api-access-mf5jl], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack/dnsmasq-dns-56bc8bbcf-qjwfg" 
podUID="1abbe1e6-e825-4954-9384-ea5de2e62808" Sep 30 10:06:13 crc kubenswrapper[4730]: I0930 10:06:13.650004 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mf5jl\" (UniqueName: \"kubernetes.io/projected/1abbe1e6-e825-4954-9384-ea5de2e62808-kube-api-access-mf5jl\") pod \"dnsmasq-dns-56bc8bbcf-qjwfg\" (UID: \"1abbe1e6-e825-4954-9384-ea5de2e62808\") " pod="openstack/dnsmasq-dns-56bc8bbcf-qjwfg" Sep 30 10:06:13 crc kubenswrapper[4730]: I0930 10:06:13.683451 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-d87b9d675-rjngv"] Sep 30 10:06:13 crc kubenswrapper[4730]: I0930 10:06:13.688728 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-d87b9d675-rjngv" Sep 30 10:06:13 crc kubenswrapper[4730]: I0930 10:06:13.693059 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-d87b9d675-rjngv"] Sep 30 10:06:13 crc kubenswrapper[4730]: I0930 10:06:13.698073 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb" Sep 30 10:06:13 crc kubenswrapper[4730]: I0930 10:06:13.722564 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xmd5c\" (UniqueName: \"kubernetes.io/projected/841c63b2-fda6-4269-8ba8-8567555326b4-kube-api-access-xmd5c\") pod \"ovn-northd-0\" (UID: \"841c63b2-fda6-4269-8ba8-8567555326b4\") " pod="openstack/ovn-northd-0" Sep 30 10:06:13 crc kubenswrapper[4730]: I0930 10:06:13.722622 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/841c63b2-fda6-4269-8ba8-8567555326b4-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"841c63b2-fda6-4269-8ba8-8567555326b4\") " pod="openstack/ovn-northd-0" Sep 30 10:06:13 crc kubenswrapper[4730]: I0930 10:06:13.722667 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f609f208-0734-4364-b05e-0364bded655e-config\") pod \"ovn-controller-metrics-zh8k5\" (UID: \"f609f208-0734-4364-b05e-0364bded655e\") " pod="openstack/ovn-controller-metrics-zh8k5" Sep 30 10:06:13 crc kubenswrapper[4730]: I0930 10:06:13.722697 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/841c63b2-fda6-4269-8ba8-8567555326b4-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"841c63b2-fda6-4269-8ba8-8567555326b4\") " pod="openstack/ovn-northd-0" Sep 30 10:06:13 crc kubenswrapper[4730]: I0930 10:06:13.722722 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/841c63b2-fda6-4269-8ba8-8567555326b4-scripts\") pod \"ovn-northd-0\" (UID: \"841c63b2-fda6-4269-8ba8-8567555326b4\") " pod="openstack/ovn-northd-0" Sep 30 10:06:13 crc kubenswrapper[4730]: I0930 10:06:13.722741 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/841c63b2-fda6-4269-8ba8-8567555326b4-config\") pod \"ovn-northd-0\" (UID: \"841c63b2-fda6-4269-8ba8-8567555326b4\") " pod="openstack/ovn-northd-0" Sep 30 10:06:13 crc kubenswrapper[4730]: I0930 10:06:13.722785 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d2z45\" (UniqueName: 
\"kubernetes.io/projected/f609f208-0734-4364-b05e-0364bded655e-kube-api-access-d2z45\") pod \"ovn-controller-metrics-zh8k5\" (UID: \"f609f208-0734-4364-b05e-0364bded655e\") " pod="openstack/ovn-controller-metrics-zh8k5" Sep 30 10:06:13 crc kubenswrapper[4730]: I0930 10:06:13.722841 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/f609f208-0734-4364-b05e-0364bded655e-ovn-rundir\") pod \"ovn-controller-metrics-zh8k5\" (UID: \"f609f208-0734-4364-b05e-0364bded655e\") " pod="openstack/ovn-controller-metrics-zh8k5" Sep 30 10:06:13 crc kubenswrapper[4730]: I0930 10:06:13.722861 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/f609f208-0734-4364-b05e-0364bded655e-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-zh8k5\" (UID: \"f609f208-0734-4364-b05e-0364bded655e\") " pod="openstack/ovn-controller-metrics-zh8k5" Sep 30 10:06:13 crc kubenswrapper[4730]: I0930 10:06:13.722878 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/841c63b2-fda6-4269-8ba8-8567555326b4-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"841c63b2-fda6-4269-8ba8-8567555326b4\") " pod="openstack/ovn-northd-0" Sep 30 10:06:13 crc kubenswrapper[4730]: I0930 10:06:13.722900 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f609f208-0734-4364-b05e-0364bded655e-combined-ca-bundle\") pod \"ovn-controller-metrics-zh8k5\" (UID: \"f609f208-0734-4364-b05e-0364bded655e\") " pod="openstack/ovn-controller-metrics-zh8k5" Sep 30 10:06:13 crc kubenswrapper[4730]: I0930 10:06:13.722932 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/841c63b2-fda6-4269-8ba8-8567555326b4-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"841c63b2-fda6-4269-8ba8-8567555326b4\") " pod="openstack/ovn-northd-0" Sep 30 10:06:13 crc kubenswrapper[4730]: I0930 10:06:13.722958 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/f609f208-0734-4364-b05e-0364bded655e-ovs-rundir\") pod \"ovn-controller-metrics-zh8k5\" (UID: \"f609f208-0734-4364-b05e-0364bded655e\") " pod="openstack/ovn-controller-metrics-zh8k5" Sep 30 10:06:13 crc kubenswrapper[4730]: I0930 10:06:13.723307 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/f609f208-0734-4364-b05e-0364bded655e-ovs-rundir\") pod \"ovn-controller-metrics-zh8k5\" (UID: \"f609f208-0734-4364-b05e-0364bded655e\") " pod="openstack/ovn-controller-metrics-zh8k5" Sep 30 10:06:13 crc kubenswrapper[4730]: I0930 10:06:13.724240 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f609f208-0734-4364-b05e-0364bded655e-config\") pod \"ovn-controller-metrics-zh8k5\" (UID: \"f609f208-0734-4364-b05e-0364bded655e\") " pod="openstack/ovn-controller-metrics-zh8k5" Sep 30 10:06:13 crc kubenswrapper[4730]: I0930 10:06:13.724255 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/f609f208-0734-4364-b05e-0364bded655e-ovn-rundir\") pod \"ovn-controller-metrics-zh8k5\" (UID: 
\"f609f208-0734-4364-b05e-0364bded655e\") " pod="openstack/ovn-controller-metrics-zh8k5" Sep 30 10:06:13 crc kubenswrapper[4730]: I0930 10:06:13.732104 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f609f208-0734-4364-b05e-0364bded655e-combined-ca-bundle\") pod \"ovn-controller-metrics-zh8k5\" (UID: \"f609f208-0734-4364-b05e-0364bded655e\") " pod="openstack/ovn-controller-metrics-zh8k5" Sep 30 10:06:13 crc kubenswrapper[4730]: I0930 10:06:13.754254 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d2z45\" (UniqueName: \"kubernetes.io/projected/f609f208-0734-4364-b05e-0364bded655e-kube-api-access-d2z45\") pod \"ovn-controller-metrics-zh8k5\" (UID: \"f609f208-0734-4364-b05e-0364bded655e\") " pod="openstack/ovn-controller-metrics-zh8k5" Sep 30 10:06:13 crc kubenswrapper[4730]: I0930 10:06:13.757149 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/f609f208-0734-4364-b05e-0364bded655e-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-zh8k5\" (UID: \"f609f208-0734-4364-b05e-0364bded655e\") " pod="openstack/ovn-controller-metrics-zh8k5" Sep 30 10:06:13 crc kubenswrapper[4730]: I0930 10:06:13.825720 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tqcwm\" (UniqueName: \"kubernetes.io/projected/75687951-20a4-47c3-a5f2-397cdb1953f4-kube-api-access-tqcwm\") pod \"dnsmasq-dns-d87b9d675-rjngv\" (UID: \"75687951-20a4-47c3-a5f2-397cdb1953f4\") " pod="openstack/dnsmasq-dns-d87b9d675-rjngv" Sep 30 10:06:13 crc kubenswrapper[4730]: I0930 10:06:13.825779 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/841c63b2-fda6-4269-8ba8-8567555326b4-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"841c63b2-fda6-4269-8ba8-8567555326b4\") " pod="openstack/ovn-northd-0" Sep 30 10:06:13 crc kubenswrapper[4730]: I0930 10:06:13.825838 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/75687951-20a4-47c3-a5f2-397cdb1953f4-dns-svc\") pod \"dnsmasq-dns-d87b9d675-rjngv\" (UID: \"75687951-20a4-47c3-a5f2-397cdb1953f4\") " pod="openstack/dnsmasq-dns-d87b9d675-rjngv" Sep 30 10:06:13 crc kubenswrapper[4730]: I0930 10:06:13.825893 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xmd5c\" (UniqueName: \"kubernetes.io/projected/841c63b2-fda6-4269-8ba8-8567555326b4-kube-api-access-xmd5c\") pod \"ovn-northd-0\" (UID: \"841c63b2-fda6-4269-8ba8-8567555326b4\") " pod="openstack/ovn-northd-0" Sep 30 10:06:13 crc kubenswrapper[4730]: I0930 10:06:13.825915 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/75687951-20a4-47c3-a5f2-397cdb1953f4-config\") pod \"dnsmasq-dns-d87b9d675-rjngv\" (UID: \"75687951-20a4-47c3-a5f2-397cdb1953f4\") " pod="openstack/dnsmasq-dns-d87b9d675-rjngv" Sep 30 10:06:13 crc kubenswrapper[4730]: I0930 10:06:13.825931 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/841c63b2-fda6-4269-8ba8-8567555326b4-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"841c63b2-fda6-4269-8ba8-8567555326b4\") " 
pod="openstack/ovn-northd-0" Sep 30 10:06:13 crc kubenswrapper[4730]: I0930 10:06:13.825961 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/841c63b2-fda6-4269-8ba8-8567555326b4-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"841c63b2-fda6-4269-8ba8-8567555326b4\") " pod="openstack/ovn-northd-0" Sep 30 10:06:13 crc kubenswrapper[4730]: I0930 10:06:13.825979 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/841c63b2-fda6-4269-8ba8-8567555326b4-scripts\") pod \"ovn-northd-0\" (UID: \"841c63b2-fda6-4269-8ba8-8567555326b4\") " pod="openstack/ovn-northd-0" Sep 30 10:06:13 crc kubenswrapper[4730]: I0930 10:06:13.825995 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/841c63b2-fda6-4269-8ba8-8567555326b4-config\") pod \"ovn-northd-0\" (UID: \"841c63b2-fda6-4269-8ba8-8567555326b4\") " pod="openstack/ovn-northd-0" Sep 30 10:06:13 crc kubenswrapper[4730]: I0930 10:06:13.826041 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/75687951-20a4-47c3-a5f2-397cdb1953f4-ovsdbserver-nb\") pod \"dnsmasq-dns-d87b9d675-rjngv\" (UID: \"75687951-20a4-47c3-a5f2-397cdb1953f4\") " pod="openstack/dnsmasq-dns-d87b9d675-rjngv" Sep 30 10:06:13 crc kubenswrapper[4730]: I0930 10:06:13.826065 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/75687951-20a4-47c3-a5f2-397cdb1953f4-ovsdbserver-sb\") pod \"dnsmasq-dns-d87b9d675-rjngv\" (UID: \"75687951-20a4-47c3-a5f2-397cdb1953f4\") " pod="openstack/dnsmasq-dns-d87b9d675-rjngv" Sep 30 10:06:13 crc kubenswrapper[4730]: I0930 10:06:13.826085 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/841c63b2-fda6-4269-8ba8-8567555326b4-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"841c63b2-fda6-4269-8ba8-8567555326b4\") " pod="openstack/ovn-northd-0" Sep 30 10:06:13 crc kubenswrapper[4730]: I0930 10:06:13.826509 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/841c63b2-fda6-4269-8ba8-8567555326b4-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"841c63b2-fda6-4269-8ba8-8567555326b4\") " pod="openstack/ovn-northd-0" Sep 30 10:06:13 crc kubenswrapper[4730]: I0930 10:06:13.827559 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/841c63b2-fda6-4269-8ba8-8567555326b4-config\") pod \"ovn-northd-0\" (UID: \"841c63b2-fda6-4269-8ba8-8567555326b4\") " pod="openstack/ovn-northd-0" Sep 30 10:06:13 crc kubenswrapper[4730]: I0930 10:06:13.827605 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/841c63b2-fda6-4269-8ba8-8567555326b4-scripts\") pod \"ovn-northd-0\" (UID: \"841c63b2-fda6-4269-8ba8-8567555326b4\") " pod="openstack/ovn-northd-0" Sep 30 10:06:13 crc kubenswrapper[4730]: I0930 10:06:13.830753 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/841c63b2-fda6-4269-8ba8-8567555326b4-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: 
\"841c63b2-fda6-4269-8ba8-8567555326b4\") " pod="openstack/ovn-northd-0" Sep 30 10:06:13 crc kubenswrapper[4730]: I0930 10:06:13.831369 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/841c63b2-fda6-4269-8ba8-8567555326b4-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"841c63b2-fda6-4269-8ba8-8567555326b4\") " pod="openstack/ovn-northd-0" Sep 30 10:06:13 crc kubenswrapper[4730]: I0930 10:06:13.836806 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/841c63b2-fda6-4269-8ba8-8567555326b4-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"841c63b2-fda6-4269-8ba8-8567555326b4\") " pod="openstack/ovn-northd-0" Sep 30 10:06:13 crc kubenswrapper[4730]: I0930 10:06:13.845071 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-zh8k5" Sep 30 10:06:13 crc kubenswrapper[4730]: I0930 10:06:13.850454 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xmd5c\" (UniqueName: \"kubernetes.io/projected/841c63b2-fda6-4269-8ba8-8567555326b4-kube-api-access-xmd5c\") pod \"ovn-northd-0\" (UID: \"841c63b2-fda6-4269-8ba8-8567555326b4\") " pod="openstack/ovn-northd-0" Sep 30 10:06:13 crc kubenswrapper[4730]: I0930 10:06:13.927457 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tqcwm\" (UniqueName: \"kubernetes.io/projected/75687951-20a4-47c3-a5f2-397cdb1953f4-kube-api-access-tqcwm\") pod \"dnsmasq-dns-d87b9d675-rjngv\" (UID: \"75687951-20a4-47c3-a5f2-397cdb1953f4\") " pod="openstack/dnsmasq-dns-d87b9d675-rjngv" Sep 30 10:06:13 crc kubenswrapper[4730]: I0930 10:06:13.927538 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/75687951-20a4-47c3-a5f2-397cdb1953f4-dns-svc\") pod \"dnsmasq-dns-d87b9d675-rjngv\" (UID: \"75687951-20a4-47c3-a5f2-397cdb1953f4\") " pod="openstack/dnsmasq-dns-d87b9d675-rjngv" Sep 30 10:06:13 crc kubenswrapper[4730]: I0930 10:06:13.927572 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/75687951-20a4-47c3-a5f2-397cdb1953f4-config\") pod \"dnsmasq-dns-d87b9d675-rjngv\" (UID: \"75687951-20a4-47c3-a5f2-397cdb1953f4\") " pod="openstack/dnsmasq-dns-d87b9d675-rjngv" Sep 30 10:06:13 crc kubenswrapper[4730]: I0930 10:06:13.927654 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/75687951-20a4-47c3-a5f2-397cdb1953f4-ovsdbserver-nb\") pod \"dnsmasq-dns-d87b9d675-rjngv\" (UID: \"75687951-20a4-47c3-a5f2-397cdb1953f4\") " pod="openstack/dnsmasq-dns-d87b9d675-rjngv" Sep 30 10:06:13 crc kubenswrapper[4730]: I0930 10:06:13.927679 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/75687951-20a4-47c3-a5f2-397cdb1953f4-ovsdbserver-sb\") pod \"dnsmasq-dns-d87b9d675-rjngv\" (UID: \"75687951-20a4-47c3-a5f2-397cdb1953f4\") " pod="openstack/dnsmasq-dns-d87b9d675-rjngv" Sep 30 10:06:13 crc kubenswrapper[4730]: I0930 10:06:13.928704 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/75687951-20a4-47c3-a5f2-397cdb1953f4-ovsdbserver-sb\") pod \"dnsmasq-dns-d87b9d675-rjngv\" (UID: 
\"75687951-20a4-47c3-a5f2-397cdb1953f4\") " pod="openstack/dnsmasq-dns-d87b9d675-rjngv" Sep 30 10:06:13 crc kubenswrapper[4730]: I0930 10:06:13.930174 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/75687951-20a4-47c3-a5f2-397cdb1953f4-config\") pod \"dnsmasq-dns-d87b9d675-rjngv\" (UID: \"75687951-20a4-47c3-a5f2-397cdb1953f4\") " pod="openstack/dnsmasq-dns-d87b9d675-rjngv" Sep 30 10:06:13 crc kubenswrapper[4730]: I0930 10:06:13.930235 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/75687951-20a4-47c3-a5f2-397cdb1953f4-dns-svc\") pod \"dnsmasq-dns-d87b9d675-rjngv\" (UID: \"75687951-20a4-47c3-a5f2-397cdb1953f4\") " pod="openstack/dnsmasq-dns-d87b9d675-rjngv" Sep 30 10:06:13 crc kubenswrapper[4730]: I0930 10:06:13.930548 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/75687951-20a4-47c3-a5f2-397cdb1953f4-ovsdbserver-nb\") pod \"dnsmasq-dns-d87b9d675-rjngv\" (UID: \"75687951-20a4-47c3-a5f2-397cdb1953f4\") " pod="openstack/dnsmasq-dns-d87b9d675-rjngv" Sep 30 10:06:13 crc kubenswrapper[4730]: I0930 10:06:13.937594 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0" Sep 30 10:06:13 crc kubenswrapper[4730]: I0930 10:06:13.948595 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tqcwm\" (UniqueName: \"kubernetes.io/projected/75687951-20a4-47c3-a5f2-397cdb1953f4-kube-api-access-tqcwm\") pod \"dnsmasq-dns-d87b9d675-rjngv\" (UID: \"75687951-20a4-47c3-a5f2-397cdb1953f4\") " pod="openstack/dnsmasq-dns-d87b9d675-rjngv" Sep 30 10:06:14 crc kubenswrapper[4730]: I0930 10:06:14.024546 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-d87b9d675-rjngv" Sep 30 10:06:14 crc kubenswrapper[4730]: I0930 10:06:14.146568 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-56bc8bbcf-qjwfg" Sep 30 10:06:14 crc kubenswrapper[4730]: I0930 10:06:14.156655 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-56bc8bbcf-qjwfg" Sep 30 10:06:14 crc kubenswrapper[4730]: I0930 10:06:14.231815 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1abbe1e6-e825-4954-9384-ea5de2e62808-config\") pod \"1abbe1e6-e825-4954-9384-ea5de2e62808\" (UID: \"1abbe1e6-e825-4954-9384-ea5de2e62808\") " Sep 30 10:06:14 crc kubenswrapper[4730]: I0930 10:06:14.231857 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1abbe1e6-e825-4954-9384-ea5de2e62808-dns-svc\") pod \"1abbe1e6-e825-4954-9384-ea5de2e62808\" (UID: \"1abbe1e6-e825-4954-9384-ea5de2e62808\") " Sep 30 10:06:14 crc kubenswrapper[4730]: I0930 10:06:14.231906 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mf5jl\" (UniqueName: \"kubernetes.io/projected/1abbe1e6-e825-4954-9384-ea5de2e62808-kube-api-access-mf5jl\") pod \"1abbe1e6-e825-4954-9384-ea5de2e62808\" (UID: \"1abbe1e6-e825-4954-9384-ea5de2e62808\") " Sep 30 10:06:14 crc kubenswrapper[4730]: I0930 10:06:14.231952 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1abbe1e6-e825-4954-9384-ea5de2e62808-ovsdbserver-sb\") pod \"1abbe1e6-e825-4954-9384-ea5de2e62808\" (UID: \"1abbe1e6-e825-4954-9384-ea5de2e62808\") " Sep 30 10:06:14 crc kubenswrapper[4730]: I0930 10:06:14.232483 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1abbe1e6-e825-4954-9384-ea5de2e62808-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "1abbe1e6-e825-4954-9384-ea5de2e62808" (UID: "1abbe1e6-e825-4954-9384-ea5de2e62808"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 10:06:14 crc kubenswrapper[4730]: I0930 10:06:14.232854 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1abbe1e6-e825-4954-9384-ea5de2e62808-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "1abbe1e6-e825-4954-9384-ea5de2e62808" (UID: "1abbe1e6-e825-4954-9384-ea5de2e62808"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 10:06:14 crc kubenswrapper[4730]: I0930 10:06:14.233222 4730 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1abbe1e6-e825-4954-9384-ea5de2e62808-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 10:06:14 crc kubenswrapper[4730]: I0930 10:06:14.233236 4730 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1abbe1e6-e825-4954-9384-ea5de2e62808-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 30 10:06:14 crc kubenswrapper[4730]: I0930 10:06:14.233379 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1abbe1e6-e825-4954-9384-ea5de2e62808-config" (OuterVolumeSpecName: "config") pod "1abbe1e6-e825-4954-9384-ea5de2e62808" (UID: "1abbe1e6-e825-4954-9384-ea5de2e62808"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 10:06:14 crc kubenswrapper[4730]: I0930 10:06:14.234874 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1abbe1e6-e825-4954-9384-ea5de2e62808-kube-api-access-mf5jl" (OuterVolumeSpecName: "kube-api-access-mf5jl") pod "1abbe1e6-e825-4954-9384-ea5de2e62808" (UID: "1abbe1e6-e825-4954-9384-ea5de2e62808"). InnerVolumeSpecName "kube-api-access-mf5jl". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:06:14 crc kubenswrapper[4730]: I0930 10:06:14.293838 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Sep 30 10:06:14 crc kubenswrapper[4730]: I0930 10:06:14.334757 4730 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1abbe1e6-e825-4954-9384-ea5de2e62808-config\") on node \"crc\" DevicePath \"\"" Sep 30 10:06:14 crc kubenswrapper[4730]: I0930 10:06:14.334796 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mf5jl\" (UniqueName: \"kubernetes.io/projected/1abbe1e6-e825-4954-9384-ea5de2e62808-kube-api-access-mf5jl\") on node \"crc\" DevicePath \"\"" Sep 30 10:06:15 crc kubenswrapper[4730]: I0930 10:06:15.161829 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-56bc8bbcf-qjwfg" Sep 30 10:06:15 crc kubenswrapper[4730]: I0930 10:06:15.220778 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-56bc8bbcf-qjwfg"] Sep 30 10:06:15 crc kubenswrapper[4730]: I0930 10:06:15.234913 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-56bc8bbcf-qjwfg"] Sep 30 10:06:16 crc kubenswrapper[4730]: I0930 10:06:16.390064 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1abbe1e6-e825-4954-9384-ea5de2e62808" path="/var/lib/kubelet/pods/1abbe1e6-e825-4954-9384-ea5de2e62808/volumes" Sep 30 10:06:16 crc kubenswrapper[4730]: I0930 10:06:16.455569 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-d87b9d675-rjngv"] Sep 30 10:06:16 crc kubenswrapper[4730]: W0930 10:06:16.566923 4730 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf609f208_0734_4364_b05e_0364bded655e.slice/crio-ccdbfb378643dc345f5b3b2c6a4760bbc369c52cfe69b5f31a01bb1f570d08f9 WatchSource:0}: Error finding container ccdbfb378643dc345f5b3b2c6a4760bbc369c52cfe69b5f31a01bb1f570d08f9: Status 404 returned error can't find the container with id ccdbfb378643dc345f5b3b2c6a4760bbc369c52cfe69b5f31a01bb1f570d08f9 Sep 30 10:06:16 crc kubenswrapper[4730]: W0930 10:06:16.571171 4730 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod841c63b2_fda6_4269_8ba8_8567555326b4.slice/crio-020e3aab7c2588d440d07adf682fa56e2531eaef5823a49a0a2190435a15ec6d WatchSource:0}: Error finding container 020e3aab7c2588d440d07adf682fa56e2531eaef5823a49a0a2190435a15ec6d: Status 404 returned error can't find the container with id 020e3aab7c2588d440d07adf682fa56e2531eaef5823a49a0a2190435a15ec6d Sep 30 10:06:16 crc kubenswrapper[4730]: I0930 10:06:16.574258 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-zh8k5"] Sep 30 10:06:16 crc kubenswrapper[4730]: I0930 10:06:16.581464 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Sep 30 
10:06:17 crc kubenswrapper[4730]: I0930 10:06:17.182594 4730 generic.go:334] "Generic (PLEG): container finished" podID="75687951-20a4-47c3-a5f2-397cdb1953f4" containerID="f74c7acaa016be8cba95d1920acc16be85555d837c579c3435db1395bdad011d" exitCode=0 Sep 30 10:06:17 crc kubenswrapper[4730]: I0930 10:06:17.182669 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-d87b9d675-rjngv" event={"ID":"75687951-20a4-47c3-a5f2-397cdb1953f4","Type":"ContainerDied","Data":"f74c7acaa016be8cba95d1920acc16be85555d837c579c3435db1395bdad011d"} Sep 30 10:06:17 crc kubenswrapper[4730]: I0930 10:06:17.183046 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-d87b9d675-rjngv" event={"ID":"75687951-20a4-47c3-a5f2-397cdb1953f4","Type":"ContainerStarted","Data":"1679914c6933b7b227bc597fe27dd055dc244bb91a01509dcd80f9c5d5e00e72"} Sep 30 10:06:17 crc kubenswrapper[4730]: I0930 10:06:17.185334 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-zh8k5" event={"ID":"f609f208-0734-4364-b05e-0364bded655e","Type":"ContainerStarted","Data":"78faadd4d19a16bc41fbd31c5ac38d347e1a69efa816bd2078a824a9ad5fd67e"} Sep 30 10:06:17 crc kubenswrapper[4730]: I0930 10:06:17.185371 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-zh8k5" event={"ID":"f609f208-0734-4364-b05e-0364bded655e","Type":"ContainerStarted","Data":"ccdbfb378643dc345f5b3b2c6a4760bbc369c52cfe69b5f31a01bb1f570d08f9"} Sep 30 10:06:17 crc kubenswrapper[4730]: I0930 10:06:17.187942 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"57094072-3915-41c8-a4f8-35960aa068c4","Type":"ContainerStarted","Data":"63d2119ce9e953be65ebdea7c0fa72d610b8a3f1e4501388f3b01bf0e72ced4e"} Sep 30 10:06:17 crc kubenswrapper[4730]: I0930 10:06:17.190320 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"841c63b2-fda6-4269-8ba8-8567555326b4","Type":"ContainerStarted","Data":"020e3aab7c2588d440d07adf682fa56e2531eaef5823a49a0a2190435a15ec6d"} Sep 30 10:06:17 crc kubenswrapper[4730]: I0930 10:06:17.229588 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-metrics-zh8k5" podStartSLOduration=4.229564305 podStartE2EDuration="4.229564305s" podCreationTimestamp="2025-09-30 10:06:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 10:06:17.220902035 +0000 UTC m=+1021.554162028" watchObservedRunningTime="2025-09-30 10:06:17.229564305 +0000 UTC m=+1021.562824298" Sep 30 10:06:18 crc kubenswrapper[4730]: I0930 10:06:18.205040 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"841c63b2-fda6-4269-8ba8-8567555326b4","Type":"ContainerStarted","Data":"73ee4288c15824ff9172a8de0241f76d0a940eccfa0615fc4f8911cf907cb549"} Sep 30 10:06:18 crc kubenswrapper[4730]: I0930 10:06:18.205438 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"841c63b2-fda6-4269-8ba8-8567555326b4","Type":"ContainerStarted","Data":"be8465a85664997355b4975a46e03fdcba541f675adb1e7024c46107e4fbfdd7"} Sep 30 10:06:18 crc kubenswrapper[4730]: I0930 10:06:18.205458 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-0" Sep 30 10:06:18 crc kubenswrapper[4730]: I0930 10:06:18.207667 4730 kubelet.go:2453] "SyncLoop (PLEG): event 
for pod" pod="openstack/dnsmasq-dns-d87b9d675-rjngv" event={"ID":"75687951-20a4-47c3-a5f2-397cdb1953f4","Type":"ContainerStarted","Data":"b360036e53fc39b990d51c018a8ef23f2941b9c1c08a23aef095462863feafd2"} Sep 30 10:06:18 crc kubenswrapper[4730]: I0930 10:06:18.207781 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-d87b9d675-rjngv" Sep 30 10:06:18 crc kubenswrapper[4730]: I0930 10:06:18.226174 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-northd-0" podStartSLOduration=4.731719411 podStartE2EDuration="5.226153523s" podCreationTimestamp="2025-09-30 10:06:13 +0000 UTC" firstStartedPulling="2025-09-30 10:06:16.573077586 +0000 UTC m=+1020.906337579" lastFinishedPulling="2025-09-30 10:06:17.067511698 +0000 UTC m=+1021.400771691" observedRunningTime="2025-09-30 10:06:18.222577149 +0000 UTC m=+1022.555837142" watchObservedRunningTime="2025-09-30 10:06:18.226153523 +0000 UTC m=+1022.559413516" Sep 30 10:06:18 crc kubenswrapper[4730]: I0930 10:06:18.245823 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-d87b9d675-rjngv" podStartSLOduration=5.245803044 podStartE2EDuration="5.245803044s" podCreationTimestamp="2025-09-30 10:06:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 10:06:18.243302699 +0000 UTC m=+1022.576562702" watchObservedRunningTime="2025-09-30 10:06:18.245803044 +0000 UTC m=+1022.579063037" Sep 30 10:06:18 crc kubenswrapper[4730]: I0930 10:06:18.305599 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0" Sep 30 10:06:18 crc kubenswrapper[4730]: I0930 10:06:18.376034 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0" Sep 30 10:06:19 crc kubenswrapper[4730]: I0930 10:06:19.217567 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"57094072-3915-41c8-a4f8-35960aa068c4","Type":"ContainerStarted","Data":"4bfdc5dd0f0ab29d41265792bd623a5a786bfa8ef8df4dcd2fb6a22f2f27262d"} Sep 30 10:06:20 crc kubenswrapper[4730]: I0930 10:06:20.476468 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0" Sep 30 10:06:20 crc kubenswrapper[4730]: I0930 10:06:20.548115 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0" Sep 30 10:06:21 crc kubenswrapper[4730]: I0930 10:06:21.235515 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"57094072-3915-41c8-a4f8-35960aa068c4","Type":"ContainerStarted","Data":"e91a8f65260ecf780628fab713342fbc315724569c915ec8474d885a955c9712"} Sep 30 10:06:22 crc kubenswrapper[4730]: I0930 10:06:22.374024 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/prometheus-metric-storage-0" podStartSLOduration=10.073036695 podStartE2EDuration="38.37400741s" podCreationTimestamp="2025-09-30 10:05:44 +0000 UTC" firstStartedPulling="2025-09-30 10:05:52.569558404 +0000 UTC m=+996.902818397" lastFinishedPulling="2025-09-30 10:06:20.870529119 +0000 UTC m=+1025.203789112" observedRunningTime="2025-09-30 10:06:21.261076686 +0000 UTC m=+1025.594336719" watchObservedRunningTime="2025-09-30 10:06:22.37400741 +0000 UTC m=+1026.707267393" Sep 30 10:06:22 crc kubenswrapper[4730]: 
I0930 10:06:22.379027 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-6spxn"] Sep 30 10:06:22 crc kubenswrapper[4730]: I0930 10:06:22.380730 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-6spxn" Sep 30 10:06:22 crc kubenswrapper[4730]: I0930 10:06:22.410783 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-6spxn"] Sep 30 10:06:22 crc kubenswrapper[4730]: I0930 10:06:22.482152 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2mlh2\" (UniqueName: \"kubernetes.io/projected/290bc85b-b1ab-4ddf-8e0f-a0b04df356bb-kube-api-access-2mlh2\") pod \"keystone-db-create-6spxn\" (UID: \"290bc85b-b1ab-4ddf-8e0f-a0b04df356bb\") " pod="openstack/keystone-db-create-6spxn" Sep 30 10:06:22 crc kubenswrapper[4730]: I0930 10:06:22.584037 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2mlh2\" (UniqueName: \"kubernetes.io/projected/290bc85b-b1ab-4ddf-8e0f-a0b04df356bb-kube-api-access-2mlh2\") pod \"keystone-db-create-6spxn\" (UID: \"290bc85b-b1ab-4ddf-8e0f-a0b04df356bb\") " pod="openstack/keystone-db-create-6spxn" Sep 30 10:06:22 crc kubenswrapper[4730]: I0930 10:06:22.593016 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-create-2gnxz"] Sep 30 10:06:22 crc kubenswrapper[4730]: I0930 10:06:22.594465 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-2gnxz" Sep 30 10:06:22 crc kubenswrapper[4730]: I0930 10:06:22.615804 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-2gnxz"] Sep 30 10:06:22 crc kubenswrapper[4730]: I0930 10:06:22.628330 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2mlh2\" (UniqueName: \"kubernetes.io/projected/290bc85b-b1ab-4ddf-8e0f-a0b04df356bb-kube-api-access-2mlh2\") pod \"keystone-db-create-6spxn\" (UID: \"290bc85b-b1ab-4ddf-8e0f-a0b04df356bb\") " pod="openstack/keystone-db-create-6spxn" Sep 30 10:06:22 crc kubenswrapper[4730]: I0930 10:06:22.686047 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-79v2w\" (UniqueName: \"kubernetes.io/projected/f88317b4-c432-4d17-bc96-350867eadc61-kube-api-access-79v2w\") pod \"placement-db-create-2gnxz\" (UID: \"f88317b4-c432-4d17-bc96-350867eadc61\") " pod="openstack/placement-db-create-2gnxz" Sep 30 10:06:22 crc kubenswrapper[4730]: I0930 10:06:22.715353 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-create-6spxn" Sep 30 10:06:22 crc kubenswrapper[4730]: I0930 10:06:22.788467 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-79v2w\" (UniqueName: \"kubernetes.io/projected/f88317b4-c432-4d17-bc96-350867eadc61-kube-api-access-79v2w\") pod \"placement-db-create-2gnxz\" (UID: \"f88317b4-c432-4d17-bc96-350867eadc61\") " pod="openstack/placement-db-create-2gnxz" Sep 30 10:06:22 crc kubenswrapper[4730]: I0930 10:06:22.816729 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-79v2w\" (UniqueName: \"kubernetes.io/projected/f88317b4-c432-4d17-bc96-350867eadc61-kube-api-access-79v2w\") pod \"placement-db-create-2gnxz\" (UID: \"f88317b4-c432-4d17-bc96-350867eadc61\") " pod="openstack/placement-db-create-2gnxz" Sep 30 10:06:22 crc kubenswrapper[4730]: I0930 10:06:22.920899 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-2gnxz" Sep 30 10:06:23 crc kubenswrapper[4730]: I0930 10:06:23.165024 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-6spxn"] Sep 30 10:06:23 crc kubenswrapper[4730]: I0930 10:06:23.250789 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-6spxn" event={"ID":"290bc85b-b1ab-4ddf-8e0f-a0b04df356bb","Type":"ContainerStarted","Data":"7f8cbf9c1c78a4778f38de2b71f22cb4350450249ad6eadf3a6ab76dfc6b048e"} Sep 30 10:06:23 crc kubenswrapper[4730]: I0930 10:06:23.316368 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-2gnxz"] Sep 30 10:06:23 crc kubenswrapper[4730]: W0930 10:06:23.320724 4730 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf88317b4_c432_4d17_bc96_350867eadc61.slice/crio-7082e6c95f678ac9531469678da07b318009f3bb6024fb440a150d1d639693de WatchSource:0}: Error finding container 7082e6c95f678ac9531469678da07b318009f3bb6024fb440a150d1d639693de: Status 404 returned error can't find the container with id 7082e6c95f678ac9531469678da07b318009f3bb6024fb440a150d1d639693de Sep 30 10:06:24 crc kubenswrapper[4730]: I0930 10:06:24.027119 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-d87b9d675-rjngv" Sep 30 10:06:24 crc kubenswrapper[4730]: I0930 10:06:24.081829 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-654794fb59-cf4b6"] Sep 30 10:06:24 crc kubenswrapper[4730]: I0930 10:06:24.082042 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-654794fb59-cf4b6" podUID="635201a1-9967-46a0-8561-14d9d70d0c6c" containerName="dnsmasq-dns" containerID="cri-o://cc4d6d696f0b2c2424e2d83548632b3c85eb39e49fc7fb669932f7467ec3909e" gracePeriod=10 Sep 30 10:06:24 crc kubenswrapper[4730]: I0930 10:06:24.203466 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/watcher-db-create-87fm4"] Sep 30 10:06:24 crc kubenswrapper[4730]: I0930 10:06:24.204861 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/watcher-db-create-87fm4" Sep 30 10:06:24 crc kubenswrapper[4730]: I0930 10:06:24.213089 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-db-create-87fm4"] Sep 30 10:06:24 crc kubenswrapper[4730]: I0930 10:06:24.261793 4730 generic.go:334] "Generic (PLEG): container finished" podID="290bc85b-b1ab-4ddf-8e0f-a0b04df356bb" containerID="fe67a7fa98146f0625851ff0f4258a3c3efbc57804d83781b0d5e4e768317831" exitCode=0 Sep 30 10:06:24 crc kubenswrapper[4730]: I0930 10:06:24.261872 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-6spxn" event={"ID":"290bc85b-b1ab-4ddf-8e0f-a0b04df356bb","Type":"ContainerDied","Data":"fe67a7fa98146f0625851ff0f4258a3c3efbc57804d83781b0d5e4e768317831"} Sep 30 10:06:24 crc kubenswrapper[4730]: I0930 10:06:24.265310 4730 generic.go:334] "Generic (PLEG): container finished" podID="635201a1-9967-46a0-8561-14d9d70d0c6c" containerID="cc4d6d696f0b2c2424e2d83548632b3c85eb39e49fc7fb669932f7467ec3909e" exitCode=0 Sep 30 10:06:24 crc kubenswrapper[4730]: I0930 10:06:24.265400 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-654794fb59-cf4b6" event={"ID":"635201a1-9967-46a0-8561-14d9d70d0c6c","Type":"ContainerDied","Data":"cc4d6d696f0b2c2424e2d83548632b3c85eb39e49fc7fb669932f7467ec3909e"} Sep 30 10:06:24 crc kubenswrapper[4730]: I0930 10:06:24.267250 4730 generic.go:334] "Generic (PLEG): container finished" podID="f88317b4-c432-4d17-bc96-350867eadc61" containerID="a625614874f466d491036e6d2bfd4d978110c68da3dcf20258720f5c49f93acf" exitCode=0 Sep 30 10:06:24 crc kubenswrapper[4730]: I0930 10:06:24.267290 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-2gnxz" event={"ID":"f88317b4-c432-4d17-bc96-350867eadc61","Type":"ContainerDied","Data":"a625614874f466d491036e6d2bfd4d978110c68da3dcf20258720f5c49f93acf"} Sep 30 10:06:24 crc kubenswrapper[4730]: I0930 10:06:24.267313 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-2gnxz" event={"ID":"f88317b4-c432-4d17-bc96-350867eadc61","Type":"ContainerStarted","Data":"7082e6c95f678ac9531469678da07b318009f3bb6024fb440a150d1d639693de"} Sep 30 10:06:24 crc kubenswrapper[4730]: I0930 10:06:24.311528 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ltk66\" (UniqueName: \"kubernetes.io/projected/fa1eee2f-4cdc-408e-b5ec-e142f7a33250-kube-api-access-ltk66\") pod \"watcher-db-create-87fm4\" (UID: \"fa1eee2f-4cdc-408e-b5ec-e142f7a33250\") " pod="openstack/watcher-db-create-87fm4" Sep 30 10:06:24 crc kubenswrapper[4730]: I0930 10:06:24.413952 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ltk66\" (UniqueName: \"kubernetes.io/projected/fa1eee2f-4cdc-408e-b5ec-e142f7a33250-kube-api-access-ltk66\") pod \"watcher-db-create-87fm4\" (UID: \"fa1eee2f-4cdc-408e-b5ec-e142f7a33250\") " pod="openstack/watcher-db-create-87fm4" Sep 30 10:06:24 crc kubenswrapper[4730]: I0930 10:06:24.444921 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ltk66\" (UniqueName: \"kubernetes.io/projected/fa1eee2f-4cdc-408e-b5ec-e142f7a33250-kube-api-access-ltk66\") pod \"watcher-db-create-87fm4\" (UID: \"fa1eee2f-4cdc-408e-b5ec-e142f7a33250\") " pod="openstack/watcher-db-create-87fm4" Sep 30 10:06:24 crc kubenswrapper[4730]: I0930 10:06:24.556184 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/watcher-db-create-87fm4" Sep 30 10:06:24 crc kubenswrapper[4730]: I0930 10:06:24.625057 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-654794fb59-cf4b6" Sep 30 10:06:24 crc kubenswrapper[4730]: I0930 10:06:24.719635 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d2hlz\" (UniqueName: \"kubernetes.io/projected/635201a1-9967-46a0-8561-14d9d70d0c6c-kube-api-access-d2hlz\") pod \"635201a1-9967-46a0-8561-14d9d70d0c6c\" (UID: \"635201a1-9967-46a0-8561-14d9d70d0c6c\") " Sep 30 10:06:24 crc kubenswrapper[4730]: I0930 10:06:24.719747 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/635201a1-9967-46a0-8561-14d9d70d0c6c-config\") pod \"635201a1-9967-46a0-8561-14d9d70d0c6c\" (UID: \"635201a1-9967-46a0-8561-14d9d70d0c6c\") " Sep 30 10:06:24 crc kubenswrapper[4730]: I0930 10:06:24.719806 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/635201a1-9967-46a0-8561-14d9d70d0c6c-dns-svc\") pod \"635201a1-9967-46a0-8561-14d9d70d0c6c\" (UID: \"635201a1-9967-46a0-8561-14d9d70d0c6c\") " Sep 30 10:06:24 crc kubenswrapper[4730]: I0930 10:06:24.724200 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/635201a1-9967-46a0-8561-14d9d70d0c6c-kube-api-access-d2hlz" (OuterVolumeSpecName: "kube-api-access-d2hlz") pod "635201a1-9967-46a0-8561-14d9d70d0c6c" (UID: "635201a1-9967-46a0-8561-14d9d70d0c6c"). InnerVolumeSpecName "kube-api-access-d2hlz". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:06:24 crc kubenswrapper[4730]: I0930 10:06:24.766957 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/635201a1-9967-46a0-8561-14d9d70d0c6c-config" (OuterVolumeSpecName: "config") pod "635201a1-9967-46a0-8561-14d9d70d0c6c" (UID: "635201a1-9967-46a0-8561-14d9d70d0c6c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 10:06:24 crc kubenswrapper[4730]: I0930 10:06:24.778381 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/635201a1-9967-46a0-8561-14d9d70d0c6c-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "635201a1-9967-46a0-8561-14d9d70d0c6c" (UID: "635201a1-9967-46a0-8561-14d9d70d0c6c"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 10:06:24 crc kubenswrapper[4730]: I0930 10:06:24.822377 4730 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/635201a1-9967-46a0-8561-14d9d70d0c6c-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 10:06:24 crc kubenswrapper[4730]: I0930 10:06:24.822423 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d2hlz\" (UniqueName: \"kubernetes.io/projected/635201a1-9967-46a0-8561-14d9d70d0c6c-kube-api-access-d2hlz\") on node \"crc\" DevicePath \"\"" Sep 30 10:06:24 crc kubenswrapper[4730]: I0930 10:06:24.822437 4730 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/635201a1-9967-46a0-8561-14d9d70d0c6c-config\") on node \"crc\" DevicePath \"\"" Sep 30 10:06:25 crc kubenswrapper[4730]: I0930 10:06:25.012120 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-db-create-87fm4"] Sep 30 10:06:25 crc kubenswrapper[4730]: I0930 10:06:25.277510 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-db-create-87fm4" event={"ID":"fa1eee2f-4cdc-408e-b5ec-e142f7a33250","Type":"ContainerStarted","Data":"d3670ec3a9701dd9ff30e35035e432537d3c7549dec022d7dcfb55ce743c6d5a"} Sep 30 10:06:25 crc kubenswrapper[4730]: I0930 10:06:25.277576 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-db-create-87fm4" event={"ID":"fa1eee2f-4cdc-408e-b5ec-e142f7a33250","Type":"ContainerStarted","Data":"4977420e5baa359eb7d3c9afc1d381b1378294ce0dba3a7eec64464b2e8ce869"} Sep 30 10:06:25 crc kubenswrapper[4730]: I0930 10:06:25.286506 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-654794fb59-cf4b6" Sep 30 10:06:25 crc kubenswrapper[4730]: I0930 10:06:25.289791 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-654794fb59-cf4b6" event={"ID":"635201a1-9967-46a0-8561-14d9d70d0c6c","Type":"ContainerDied","Data":"37a6c925df73fd273a4336da53a6ee4448ec403399d475845154dae4e9c61914"} Sep 30 10:06:25 crc kubenswrapper[4730]: I0930 10:06:25.289888 4730 scope.go:117] "RemoveContainer" containerID="cc4d6d696f0b2c2424e2d83548632b3c85eb39e49fc7fb669932f7467ec3909e" Sep 30 10:06:25 crc kubenswrapper[4730]: I0930 10:06:25.309864 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/watcher-db-create-87fm4" podStartSLOduration=1.309843384 podStartE2EDuration="1.309843384s" podCreationTimestamp="2025-09-30 10:06:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 10:06:25.30519251 +0000 UTC m=+1029.638452523" watchObservedRunningTime="2025-09-30 10:06:25.309843384 +0000 UTC m=+1029.643103377" Sep 30 10:06:25 crc kubenswrapper[4730]: I0930 10:06:25.348141 4730 scope.go:117] "RemoveContainer" containerID="75ec31386f89b9acd7913cb35a9db18a9e811ff2e4ee1a000d8249370abe0799" Sep 30 10:06:25 crc kubenswrapper[4730]: I0930 10:06:25.355421 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-654794fb59-cf4b6"] Sep 30 10:06:25 crc kubenswrapper[4730]: I0930 10:06:25.361201 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-654794fb59-cf4b6"] Sep 30 10:06:25 crc kubenswrapper[4730]: I0930 10:06:25.527829 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/prometheus-metric-storage-0" Sep 
30 10:06:25 crc kubenswrapper[4730]: I0930 10:06:25.615195 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-2gnxz" Sep 30 10:06:25 crc kubenswrapper[4730]: I0930 10:06:25.689603 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-6spxn" Sep 30 10:06:25 crc kubenswrapper[4730]: I0930 10:06:25.736362 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-79v2w\" (UniqueName: \"kubernetes.io/projected/f88317b4-c432-4d17-bc96-350867eadc61-kube-api-access-79v2w\") pod \"f88317b4-c432-4d17-bc96-350867eadc61\" (UID: \"f88317b4-c432-4d17-bc96-350867eadc61\") " Sep 30 10:06:25 crc kubenswrapper[4730]: I0930 10:06:25.745400 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88317b4-c432-4d17-bc96-350867eadc61-kube-api-access-79v2w" (OuterVolumeSpecName: "kube-api-access-79v2w") pod "f88317b4-c432-4d17-bc96-350867eadc61" (UID: "f88317b4-c432-4d17-bc96-350867eadc61"). InnerVolumeSpecName "kube-api-access-79v2w". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:06:25 crc kubenswrapper[4730]: I0930 10:06:25.838683 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2mlh2\" (UniqueName: \"kubernetes.io/projected/290bc85b-b1ab-4ddf-8e0f-a0b04df356bb-kube-api-access-2mlh2\") pod \"290bc85b-b1ab-4ddf-8e0f-a0b04df356bb\" (UID: \"290bc85b-b1ab-4ddf-8e0f-a0b04df356bb\") " Sep 30 10:06:25 crc kubenswrapper[4730]: I0930 10:06:25.839045 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-79v2w\" (UniqueName: \"kubernetes.io/projected/f88317b4-c432-4d17-bc96-350867eadc61-kube-api-access-79v2w\") on node \"crc\" DevicePath \"\"" Sep 30 10:06:25 crc kubenswrapper[4730]: I0930 10:06:25.841625 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/290bc85b-b1ab-4ddf-8e0f-a0b04df356bb-kube-api-access-2mlh2" (OuterVolumeSpecName: "kube-api-access-2mlh2") pod "290bc85b-b1ab-4ddf-8e0f-a0b04df356bb" (UID: "290bc85b-b1ab-4ddf-8e0f-a0b04df356bb"). InnerVolumeSpecName "kube-api-access-2mlh2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:06:25 crc kubenswrapper[4730]: I0930 10:06:25.940587 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2mlh2\" (UniqueName: \"kubernetes.io/projected/290bc85b-b1ab-4ddf-8e0f-a0b04df356bb-kube-api-access-2mlh2\") on node \"crc\" DevicePath \"\"" Sep 30 10:06:26 crc kubenswrapper[4730]: I0930 10:06:26.303915 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-2gnxz" event={"ID":"f88317b4-c432-4d17-bc96-350867eadc61","Type":"ContainerDied","Data":"7082e6c95f678ac9531469678da07b318009f3bb6024fb440a150d1d639693de"} Sep 30 10:06:26 crc kubenswrapper[4730]: I0930 10:06:26.303989 4730 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7082e6c95f678ac9531469678da07b318009f3bb6024fb440a150d1d639693de" Sep 30 10:06:26 crc kubenswrapper[4730]: I0930 10:06:26.305566 4730 generic.go:334] "Generic (PLEG): container finished" podID="fa1eee2f-4cdc-408e-b5ec-e142f7a33250" containerID="d3670ec3a9701dd9ff30e35035e432537d3c7549dec022d7dcfb55ce743c6d5a" exitCode=0 Sep 30 10:06:26 crc kubenswrapper[4730]: I0930 10:06:26.305702 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-db-create-87fm4" event={"ID":"fa1eee2f-4cdc-408e-b5ec-e142f7a33250","Type":"ContainerDied","Data":"d3670ec3a9701dd9ff30e35035e432537d3c7549dec022d7dcfb55ce743c6d5a"} Sep 30 10:06:26 crc kubenswrapper[4730]: I0930 10:06:26.307906 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-2gnxz" Sep 30 10:06:26 crc kubenswrapper[4730]: I0930 10:06:26.308452 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-6spxn" event={"ID":"290bc85b-b1ab-4ddf-8e0f-a0b04df356bb","Type":"ContainerDied","Data":"7f8cbf9c1c78a4778f38de2b71f22cb4350450249ad6eadf3a6ab76dfc6b048e"} Sep 30 10:06:26 crc kubenswrapper[4730]: I0930 10:06:26.308491 4730 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7f8cbf9c1c78a4778f38de2b71f22cb4350450249ad6eadf3a6ab76dfc6b048e" Sep 30 10:06:26 crc kubenswrapper[4730]: I0930 10:06:26.308560 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-6spxn" Sep 30 10:06:26 crc kubenswrapper[4730]: I0930 10:06:26.391091 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="635201a1-9967-46a0-8561-14d9d70d0c6c" path="/var/lib/kubelet/pods/635201a1-9967-46a0-8561-14d9d70d0c6c/volumes" Sep 30 10:06:27 crc kubenswrapper[4730]: I0930 10:06:27.671812 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-db-create-87fm4" Sep 30 10:06:27 crc kubenswrapper[4730]: I0930 10:06:27.774669 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ltk66\" (UniqueName: \"kubernetes.io/projected/fa1eee2f-4cdc-408e-b5ec-e142f7a33250-kube-api-access-ltk66\") pod \"fa1eee2f-4cdc-408e-b5ec-e142f7a33250\" (UID: \"fa1eee2f-4cdc-408e-b5ec-e142f7a33250\") " Sep 30 10:06:27 crc kubenswrapper[4730]: I0930 10:06:27.781811 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fa1eee2f-4cdc-408e-b5ec-e142f7a33250-kube-api-access-ltk66" (OuterVolumeSpecName: "kube-api-access-ltk66") pod "fa1eee2f-4cdc-408e-b5ec-e142f7a33250" (UID: "fa1eee2f-4cdc-408e-b5ec-e142f7a33250"). InnerVolumeSpecName "kube-api-access-ltk66". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:06:27 crc kubenswrapper[4730]: I0930 10:06:27.865869 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-create-85hvz"] Sep 30 10:06:27 crc kubenswrapper[4730]: E0930 10:06:27.866365 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f88317b4-c432-4d17-bc96-350867eadc61" containerName="mariadb-database-create" Sep 30 10:06:27 crc kubenswrapper[4730]: I0930 10:06:27.866395 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="f88317b4-c432-4d17-bc96-350867eadc61" containerName="mariadb-database-create" Sep 30 10:06:27 crc kubenswrapper[4730]: E0930 10:06:27.866431 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="635201a1-9967-46a0-8561-14d9d70d0c6c" containerName="init" Sep 30 10:06:27 crc kubenswrapper[4730]: I0930 10:06:27.866443 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="635201a1-9967-46a0-8561-14d9d70d0c6c" containerName="init" Sep 30 10:06:27 crc kubenswrapper[4730]: E0930 10:06:27.866467 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="290bc85b-b1ab-4ddf-8e0f-a0b04df356bb" containerName="mariadb-database-create" Sep 30 10:06:27 crc kubenswrapper[4730]: I0930 10:06:27.866480 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="290bc85b-b1ab-4ddf-8e0f-a0b04df356bb" containerName="mariadb-database-create" Sep 30 10:06:27 crc kubenswrapper[4730]: E0930 10:06:27.866513 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa1eee2f-4cdc-408e-b5ec-e142f7a33250" containerName="mariadb-database-create" Sep 30 10:06:27 crc kubenswrapper[4730]: I0930 10:06:27.866559 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa1eee2f-4cdc-408e-b5ec-e142f7a33250" containerName="mariadb-database-create" Sep 30 10:06:27 crc kubenswrapper[4730]: E0930 10:06:27.866585 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="635201a1-9967-46a0-8561-14d9d70d0c6c" containerName="dnsmasq-dns" Sep 30 10:06:27 crc kubenswrapper[4730]: I0930 10:06:27.866596 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="635201a1-9967-46a0-8561-14d9d70d0c6c" containerName="dnsmasq-dns" Sep 30 10:06:27 crc kubenswrapper[4730]: I0930 10:06:27.866886 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="290bc85b-b1ab-4ddf-8e0f-a0b04df356bb" containerName="mariadb-database-create" Sep 30 10:06:27 crc kubenswrapper[4730]: I0930 10:06:27.866935 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="635201a1-9967-46a0-8561-14d9d70d0c6c" containerName="dnsmasq-dns" Sep 30 10:06:27 crc kubenswrapper[4730]: I0930 10:06:27.866995 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="fa1eee2f-4cdc-408e-b5ec-e142f7a33250" containerName="mariadb-database-create" Sep 30 10:06:27 crc kubenswrapper[4730]: I0930 10:06:27.867024 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="f88317b4-c432-4d17-bc96-350867eadc61" containerName="mariadb-database-create" Sep 30 10:06:27 crc kubenswrapper[4730]: I0930 10:06:27.868021 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-85hvz" Sep 30 10:06:27 crc kubenswrapper[4730]: I0930 10:06:27.876112 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ltk66\" (UniqueName: \"kubernetes.io/projected/fa1eee2f-4cdc-408e-b5ec-e142f7a33250-kube-api-access-ltk66\") on node \"crc\" DevicePath \"\"" Sep 30 10:06:27 crc kubenswrapper[4730]: I0930 10:06:27.879994 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-85hvz"] Sep 30 10:06:27 crc kubenswrapper[4730]: I0930 10:06:27.977903 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zv7mb\" (UniqueName: \"kubernetes.io/projected/a955da24-88da-474d-9370-f146730abd1a-kube-api-access-zv7mb\") pod \"glance-db-create-85hvz\" (UID: \"a955da24-88da-474d-9370-f146730abd1a\") " pod="openstack/glance-db-create-85hvz" Sep 30 10:06:28 crc kubenswrapper[4730]: I0930 10:06:28.080004 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zv7mb\" (UniqueName: \"kubernetes.io/projected/a955da24-88da-474d-9370-f146730abd1a-kube-api-access-zv7mb\") pod \"glance-db-create-85hvz\" (UID: \"a955da24-88da-474d-9370-f146730abd1a\") " pod="openstack/glance-db-create-85hvz" Sep 30 10:06:28 crc kubenswrapper[4730]: I0930 10:06:28.114774 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zv7mb\" (UniqueName: \"kubernetes.io/projected/a955da24-88da-474d-9370-f146730abd1a-kube-api-access-zv7mb\") pod \"glance-db-create-85hvz\" (UID: \"a955da24-88da-474d-9370-f146730abd1a\") " pod="openstack/glance-db-create-85hvz" Sep 30 10:06:28 crc kubenswrapper[4730]: I0930 10:06:28.187600 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-85hvz" Sep 30 10:06:28 crc kubenswrapper[4730]: I0930 10:06:28.338353 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-db-create-87fm4" event={"ID":"fa1eee2f-4cdc-408e-b5ec-e142f7a33250","Type":"ContainerDied","Data":"4977420e5baa359eb7d3c9afc1d381b1378294ce0dba3a7eec64464b2e8ce869"} Sep 30 10:06:28 crc kubenswrapper[4730]: I0930 10:06:28.338396 4730 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4977420e5baa359eb7d3c9afc1d381b1378294ce0dba3a7eec64464b2e8ce869" Sep 30 10:06:28 crc kubenswrapper[4730]: I0930 10:06:28.338427 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/watcher-db-create-87fm4" Sep 30 10:06:28 crc kubenswrapper[4730]: I0930 10:06:28.648266 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-85hvz"] Sep 30 10:06:28 crc kubenswrapper[4730]: W0930 10:06:28.649305 4730 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda955da24_88da_474d_9370_f146730abd1a.slice/crio-efcd816fdd8465c0e51519b674f547d403ea28a47468c8fddda9aa53c5bc259d WatchSource:0}: Error finding container efcd816fdd8465c0e51519b674f547d403ea28a47468c8fddda9aa53c5bc259d: Status 404 returned error can't find the container with id efcd816fdd8465c0e51519b674f547d403ea28a47468c8fddda9aa53c5bc259d Sep 30 10:06:29 crc kubenswrapper[4730]: I0930 10:06:29.018339 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-0" Sep 30 10:06:29 crc kubenswrapper[4730]: I0930 10:06:29.348344 4730 generic.go:334] "Generic (PLEG): container finished" podID="a955da24-88da-474d-9370-f146730abd1a" containerID="cc7a7ecf9857678d29345ce498e3b8a2aada250518e6e6732272d075f3903dee" exitCode=0 Sep 30 10:06:29 crc kubenswrapper[4730]: I0930 10:06:29.348485 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-85hvz" event={"ID":"a955da24-88da-474d-9370-f146730abd1a","Type":"ContainerDied","Data":"cc7a7ecf9857678d29345ce498e3b8a2aada250518e6e6732272d075f3903dee"} Sep 30 10:06:29 crc kubenswrapper[4730]: I0930 10:06:29.348802 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-85hvz" event={"ID":"a955da24-88da-474d-9370-f146730abd1a","Type":"ContainerStarted","Data":"efcd816fdd8465c0e51519b674f547d403ea28a47468c8fddda9aa53c5bc259d"} Sep 30 10:06:30 crc kubenswrapper[4730]: I0930 10:06:30.527804 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/prometheus-metric-storage-0" Sep 30 10:06:30 crc kubenswrapper[4730]: I0930 10:06:30.531894 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/prometheus-metric-storage-0" Sep 30 10:06:30 crc kubenswrapper[4730]: I0930 10:06:30.716641 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-85hvz" Sep 30 10:06:30 crc kubenswrapper[4730]: I0930 10:06:30.827769 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zv7mb\" (UniqueName: \"kubernetes.io/projected/a955da24-88da-474d-9370-f146730abd1a-kube-api-access-zv7mb\") pod \"a955da24-88da-474d-9370-f146730abd1a\" (UID: \"a955da24-88da-474d-9370-f146730abd1a\") " Sep 30 10:06:30 crc kubenswrapper[4730]: I0930 10:06:30.835248 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a955da24-88da-474d-9370-f146730abd1a-kube-api-access-zv7mb" (OuterVolumeSpecName: "kube-api-access-zv7mb") pod "a955da24-88da-474d-9370-f146730abd1a" (UID: "a955da24-88da-474d-9370-f146730abd1a"). InnerVolumeSpecName "kube-api-access-zv7mb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:06:30 crc kubenswrapper[4730]: I0930 10:06:30.929580 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zv7mb\" (UniqueName: \"kubernetes.io/projected/a955da24-88da-474d-9370-f146730abd1a-kube-api-access-zv7mb\") on node \"crc\" DevicePath \"\"" Sep 30 10:06:31 crc kubenswrapper[4730]: I0930 10:06:31.369036 4730 generic.go:334] "Generic (PLEG): container finished" podID="b1cbad29-68e1-42a9-af8f-ea1cfcb774a2" containerID="8dafeeba9b157ce2800566bddda816b3d119847d03a7d6c84aff95e1c288798c" exitCode=0 Sep 30 10:06:31 crc kubenswrapper[4730]: I0930 10:06:31.369136 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"b1cbad29-68e1-42a9-af8f-ea1cfcb774a2","Type":"ContainerDied","Data":"8dafeeba9b157ce2800566bddda816b3d119847d03a7d6c84aff95e1c288798c"} Sep 30 10:06:31 crc kubenswrapper[4730]: I0930 10:06:31.383068 4730 generic.go:334] "Generic (PLEG): container finished" podID="7ba6b518-edfa-4d19-b096-03d7d96c51a3" containerID="fd068bf37f46b20a26b96b74ca949fe1204bb5f3453145a5f8f166a1b8c48c52" exitCode=0 Sep 30 10:06:31 crc kubenswrapper[4730]: I0930 10:06:31.383160 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-notifications-server-0" event={"ID":"7ba6b518-edfa-4d19-b096-03d7d96c51a3","Type":"ContainerDied","Data":"fd068bf37f46b20a26b96b74ca949fe1204bb5f3453145a5f8f166a1b8c48c52"} Sep 30 10:06:31 crc kubenswrapper[4730]: I0930 10:06:31.391384 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-85hvz" event={"ID":"a955da24-88da-474d-9370-f146730abd1a","Type":"ContainerDied","Data":"efcd816fdd8465c0e51519b674f547d403ea28a47468c8fddda9aa53c5bc259d"} Sep 30 10:06:31 crc kubenswrapper[4730]: I0930 10:06:31.391452 4730 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="efcd816fdd8465c0e51519b674f547d403ea28a47468c8fddda9aa53c5bc259d" Sep 30 10:06:31 crc kubenswrapper[4730]: I0930 10:06:31.391481 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-85hvz" Sep 30 10:06:31 crc kubenswrapper[4730]: I0930 10:06:31.393678 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/prometheus-metric-storage-0" Sep 30 10:06:32 crc kubenswrapper[4730]: I0930 10:06:32.391681 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-2c9c-account-create-9hs8k"] Sep 30 10:06:32 crc kubenswrapper[4730]: E0930 10:06:32.392330 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a955da24-88da-474d-9370-f146730abd1a" containerName="mariadb-database-create" Sep 30 10:06:32 crc kubenswrapper[4730]: I0930 10:06:32.392345 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="a955da24-88da-474d-9370-f146730abd1a" containerName="mariadb-database-create" Sep 30 10:06:32 crc kubenswrapper[4730]: I0930 10:06:32.392544 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="a955da24-88da-474d-9370-f146730abd1a" containerName="mariadb-database-create" Sep 30 10:06:32 crc kubenswrapper[4730]: I0930 10:06:32.393152 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-2c9c-account-create-9hs8k" Sep 30 10:06:32 crc kubenswrapper[4730]: I0930 10:06:32.401912 4730 generic.go:334] "Generic (PLEG): container finished" podID="6cf0ebea-06fc-47b2-a2c6-95605e023f94" containerID="587710b1ff8f9bce76f7bf39a64d961cf638fbe04d60ee8225504cae9015fcfb" exitCode=0 Sep 30 10:06:32 crc kubenswrapper[4730]: I0930 10:06:32.401990 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"6cf0ebea-06fc-47b2-a2c6-95605e023f94","Type":"ContainerDied","Data":"587710b1ff8f9bce76f7bf39a64d961cf638fbe04d60ee8225504cae9015fcfb"} Sep 30 10:06:32 crc kubenswrapper[4730]: I0930 10:06:32.404287 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"b1cbad29-68e1-42a9-af8f-ea1cfcb774a2","Type":"ContainerStarted","Data":"32968aaf1a40ad69e937f609110da6deb679b6cf3fb2a01b61fc8aab90a2316c"} Sep 30 10:06:32 crc kubenswrapper[4730]: I0930 10:06:32.404396 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-2c9c-account-create-9hs8k"] Sep 30 10:06:32 crc kubenswrapper[4730]: I0930 10:06:32.404480 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Sep 30 10:06:32 crc kubenswrapper[4730]: I0930 10:06:32.406831 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret" Sep 30 10:06:32 crc kubenswrapper[4730]: I0930 10:06:32.408536 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-notifications-server-0" event={"ID":"7ba6b518-edfa-4d19-b096-03d7d96c51a3","Type":"ContainerStarted","Data":"ec758e4553bf07c609402234dc8b40c662d3360f4acf8d31e11ae22a7de697e4"} Sep 30 10:06:32 crc kubenswrapper[4730]: I0930 10:06:32.408813 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-notifications-server-0" Sep 30 10:06:32 crc kubenswrapper[4730]: I0930 10:06:32.475622 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tlvnh\" (UniqueName: \"kubernetes.io/projected/801c2fbd-0d18-4528-9baf-8ace34906cfa-kube-api-access-tlvnh\") pod \"keystone-2c9c-account-create-9hs8k\" (UID: \"801c2fbd-0d18-4528-9baf-8ace34906cfa\") " pod="openstack/keystone-2c9c-account-create-9hs8k" Sep 30 10:06:32 crc kubenswrapper[4730]: I0930 10:06:32.488116 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-notifications-server-0" podStartSLOduration=48.520744777 podStartE2EDuration="55.488094272s" podCreationTimestamp="2025-09-30 10:05:37 +0000 UTC" firstStartedPulling="2025-09-30 10:05:51.202977964 +0000 UTC m=+995.536237957" lastFinishedPulling="2025-09-30 10:05:58.170327469 +0000 UTC m=+1002.503587452" observedRunningTime="2025-09-30 10:06:32.467031227 +0000 UTC m=+1036.800291240" watchObservedRunningTime="2025-09-30 10:06:32.488094272 +0000 UTC m=+1036.821354265" Sep 30 10:06:32 crc kubenswrapper[4730]: I0930 10:06:32.510026 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=49.361219952 podStartE2EDuration="55.51000551s" podCreationTimestamp="2025-09-30 10:05:37 +0000 UTC" firstStartedPulling="2025-09-30 10:05:52.56713598 +0000 UTC m=+996.900395973" lastFinishedPulling="2025-09-30 10:05:58.715921538 +0000 UTC m=+1003.049181531" observedRunningTime="2025-09-30 10:06:32.500255212 +0000 UTC m=+1036.833515215" 
watchObservedRunningTime="2025-09-30 10:06:32.51000551 +0000 UTC m=+1036.843265503" Sep 30 10:06:32 crc kubenswrapper[4730]: I0930 10:06:32.580782 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tlvnh\" (UniqueName: \"kubernetes.io/projected/801c2fbd-0d18-4528-9baf-8ace34906cfa-kube-api-access-tlvnh\") pod \"keystone-2c9c-account-create-9hs8k\" (UID: \"801c2fbd-0d18-4528-9baf-8ace34906cfa\") " pod="openstack/keystone-2c9c-account-create-9hs8k" Sep 30 10:06:32 crc kubenswrapper[4730]: I0930 10:06:32.599911 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tlvnh\" (UniqueName: \"kubernetes.io/projected/801c2fbd-0d18-4528-9baf-8ace34906cfa-kube-api-access-tlvnh\") pod \"keystone-2c9c-account-create-9hs8k\" (UID: \"801c2fbd-0d18-4528-9baf-8ace34906cfa\") " pod="openstack/keystone-2c9c-account-create-9hs8k" Sep 30 10:06:32 crc kubenswrapper[4730]: I0930 10:06:32.700271 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-9b71-account-create-5mbkm"] Sep 30 10:06:32 crc kubenswrapper[4730]: I0930 10:06:32.701562 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-9b71-account-create-5mbkm" Sep 30 10:06:32 crc kubenswrapper[4730]: I0930 10:06:32.710803 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-2c9c-account-create-9hs8k" Sep 30 10:06:32 crc kubenswrapper[4730]: I0930 10:06:32.712243 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret" Sep 30 10:06:32 crc kubenswrapper[4730]: I0930 10:06:32.744726 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-9b71-account-create-5mbkm"] Sep 30 10:06:32 crc kubenswrapper[4730]: I0930 10:06:32.790386 4730 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-4tlvb" podUID="d247bfe5-48d7-49be-9cd4-2d3368015e3a" containerName="ovn-controller" probeResult="failure" output=< Sep 30 10:06:32 crc kubenswrapper[4730]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Sep 30 10:06:32 crc kubenswrapper[4730]: > Sep 30 10:06:32 crc kubenswrapper[4730]: I0930 10:06:32.795420 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-wb9fw" Sep 30 10:06:32 crc kubenswrapper[4730]: I0930 10:06:32.800449 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-wb9fw" Sep 30 10:06:32 crc kubenswrapper[4730]: I0930 10:06:32.891045 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qsjx2\" (UniqueName: \"kubernetes.io/projected/c38c27b7-08c7-4d1e-b151-3b092180bfc9-kube-api-access-qsjx2\") pod \"placement-9b71-account-create-5mbkm\" (UID: \"c38c27b7-08c7-4d1e-b151-3b092180bfc9\") " pod="openstack/placement-9b71-account-create-5mbkm" Sep 30 10:06:32 crc kubenswrapper[4730]: I0930 10:06:32.992735 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qsjx2\" (UniqueName: \"kubernetes.io/projected/c38c27b7-08c7-4d1e-b151-3b092180bfc9-kube-api-access-qsjx2\") pod \"placement-9b71-account-create-5mbkm\" (UID: \"c38c27b7-08c7-4d1e-b151-3b092180bfc9\") " pod="openstack/placement-9b71-account-create-5mbkm" Sep 30 10:06:33 crc kubenswrapper[4730]: I0930 10:06:33.016197 4730 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-qsjx2\" (UniqueName: \"kubernetes.io/projected/c38c27b7-08c7-4d1e-b151-3b092180bfc9-kube-api-access-qsjx2\") pod \"placement-9b71-account-create-5mbkm\" (UID: \"c38c27b7-08c7-4d1e-b151-3b092180bfc9\") " pod="openstack/placement-9b71-account-create-5mbkm" Sep 30 10:06:33 crc kubenswrapper[4730]: I0930 10:06:33.070753 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-4tlvb-config-xpcw2"] Sep 30 10:06:33 crc kubenswrapper[4730]: I0930 10:06:33.072039 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-4tlvb-config-xpcw2" Sep 30 10:06:33 crc kubenswrapper[4730]: I0930 10:06:33.077576 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Sep 30 10:06:33 crc kubenswrapper[4730]: I0930 10:06:33.090968 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-4tlvb-config-xpcw2"] Sep 30 10:06:33 crc kubenswrapper[4730]: I0930 10:06:33.116208 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-9b71-account-create-5mbkm" Sep 30 10:06:33 crc kubenswrapper[4730]: I0930 10:06:33.196509 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lkj9v\" (UniqueName: \"kubernetes.io/projected/38b9a04a-3f28-4fe7-8529-38195af0c035-kube-api-access-lkj9v\") pod \"ovn-controller-4tlvb-config-xpcw2\" (UID: \"38b9a04a-3f28-4fe7-8529-38195af0c035\") " pod="openstack/ovn-controller-4tlvb-config-xpcw2" Sep 30 10:06:33 crc kubenswrapper[4730]: I0930 10:06:33.196620 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/38b9a04a-3f28-4fe7-8529-38195af0c035-scripts\") pod \"ovn-controller-4tlvb-config-xpcw2\" (UID: \"38b9a04a-3f28-4fe7-8529-38195af0c035\") " pod="openstack/ovn-controller-4tlvb-config-xpcw2" Sep 30 10:06:33 crc kubenswrapper[4730]: I0930 10:06:33.196704 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/38b9a04a-3f28-4fe7-8529-38195af0c035-additional-scripts\") pod \"ovn-controller-4tlvb-config-xpcw2\" (UID: \"38b9a04a-3f28-4fe7-8529-38195af0c035\") " pod="openstack/ovn-controller-4tlvb-config-xpcw2" Sep 30 10:06:33 crc kubenswrapper[4730]: I0930 10:06:33.196759 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/38b9a04a-3f28-4fe7-8529-38195af0c035-var-run\") pod \"ovn-controller-4tlvb-config-xpcw2\" (UID: \"38b9a04a-3f28-4fe7-8529-38195af0c035\") " pod="openstack/ovn-controller-4tlvb-config-xpcw2" Sep 30 10:06:33 crc kubenswrapper[4730]: I0930 10:06:33.196807 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/38b9a04a-3f28-4fe7-8529-38195af0c035-var-log-ovn\") pod \"ovn-controller-4tlvb-config-xpcw2\" (UID: \"38b9a04a-3f28-4fe7-8529-38195af0c035\") " pod="openstack/ovn-controller-4tlvb-config-xpcw2" Sep 30 10:06:33 crc kubenswrapper[4730]: I0930 10:06:33.196989 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/38b9a04a-3f28-4fe7-8529-38195af0c035-var-run-ovn\") pod 
\"ovn-controller-4tlvb-config-xpcw2\" (UID: \"38b9a04a-3f28-4fe7-8529-38195af0c035\") " pod="openstack/ovn-controller-4tlvb-config-xpcw2" Sep 30 10:06:33 crc kubenswrapper[4730]: I0930 10:06:33.298115 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/38b9a04a-3f28-4fe7-8529-38195af0c035-var-run\") pod \"ovn-controller-4tlvb-config-xpcw2\" (UID: \"38b9a04a-3f28-4fe7-8529-38195af0c035\") " pod="openstack/ovn-controller-4tlvb-config-xpcw2" Sep 30 10:06:33 crc kubenswrapper[4730]: I0930 10:06:33.298196 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/38b9a04a-3f28-4fe7-8529-38195af0c035-var-log-ovn\") pod \"ovn-controller-4tlvb-config-xpcw2\" (UID: \"38b9a04a-3f28-4fe7-8529-38195af0c035\") " pod="openstack/ovn-controller-4tlvb-config-xpcw2" Sep 30 10:06:33 crc kubenswrapper[4730]: I0930 10:06:33.298252 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/38b9a04a-3f28-4fe7-8529-38195af0c035-var-run-ovn\") pod \"ovn-controller-4tlvb-config-xpcw2\" (UID: \"38b9a04a-3f28-4fe7-8529-38195af0c035\") " pod="openstack/ovn-controller-4tlvb-config-xpcw2" Sep 30 10:06:33 crc kubenswrapper[4730]: I0930 10:06:33.298286 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lkj9v\" (UniqueName: \"kubernetes.io/projected/38b9a04a-3f28-4fe7-8529-38195af0c035-kube-api-access-lkj9v\") pod \"ovn-controller-4tlvb-config-xpcw2\" (UID: \"38b9a04a-3f28-4fe7-8529-38195af0c035\") " pod="openstack/ovn-controller-4tlvb-config-xpcw2" Sep 30 10:06:33 crc kubenswrapper[4730]: I0930 10:06:33.298341 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/38b9a04a-3f28-4fe7-8529-38195af0c035-scripts\") pod \"ovn-controller-4tlvb-config-xpcw2\" (UID: \"38b9a04a-3f28-4fe7-8529-38195af0c035\") " pod="openstack/ovn-controller-4tlvb-config-xpcw2" Sep 30 10:06:33 crc kubenswrapper[4730]: I0930 10:06:33.298403 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/38b9a04a-3f28-4fe7-8529-38195af0c035-additional-scripts\") pod \"ovn-controller-4tlvb-config-xpcw2\" (UID: \"38b9a04a-3f28-4fe7-8529-38195af0c035\") " pod="openstack/ovn-controller-4tlvb-config-xpcw2" Sep 30 10:06:33 crc kubenswrapper[4730]: I0930 10:06:33.298563 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/38b9a04a-3f28-4fe7-8529-38195af0c035-var-log-ovn\") pod \"ovn-controller-4tlvb-config-xpcw2\" (UID: \"38b9a04a-3f28-4fe7-8529-38195af0c035\") " pod="openstack/ovn-controller-4tlvb-config-xpcw2" Sep 30 10:06:33 crc kubenswrapper[4730]: I0930 10:06:33.298595 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/38b9a04a-3f28-4fe7-8529-38195af0c035-var-run-ovn\") pod \"ovn-controller-4tlvb-config-xpcw2\" (UID: \"38b9a04a-3f28-4fe7-8529-38195af0c035\") " pod="openstack/ovn-controller-4tlvb-config-xpcw2" Sep 30 10:06:33 crc kubenswrapper[4730]: I0930 10:06:33.299314 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/38b9a04a-3f28-4fe7-8529-38195af0c035-additional-scripts\") pod 
\"ovn-controller-4tlvb-config-xpcw2\" (UID: \"38b9a04a-3f28-4fe7-8529-38195af0c035\") " pod="openstack/ovn-controller-4tlvb-config-xpcw2" Sep 30 10:06:33 crc kubenswrapper[4730]: I0930 10:06:33.300659 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/38b9a04a-3f28-4fe7-8529-38195af0c035-scripts\") pod \"ovn-controller-4tlvb-config-xpcw2\" (UID: \"38b9a04a-3f28-4fe7-8529-38195af0c035\") " pod="openstack/ovn-controller-4tlvb-config-xpcw2" Sep 30 10:06:33 crc kubenswrapper[4730]: I0930 10:06:33.300727 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/38b9a04a-3f28-4fe7-8529-38195af0c035-var-run\") pod \"ovn-controller-4tlvb-config-xpcw2\" (UID: \"38b9a04a-3f28-4fe7-8529-38195af0c035\") " pod="openstack/ovn-controller-4tlvb-config-xpcw2" Sep 30 10:06:33 crc kubenswrapper[4730]: I0930 10:06:33.307811 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-2c9c-account-create-9hs8k"] Sep 30 10:06:33 crc kubenswrapper[4730]: W0930 10:06:33.309688 4730 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod801c2fbd_0d18_4528_9baf_8ace34906cfa.slice/crio-1cdc28e7b2405b1f6517001e9307b5d90310db683239a4df44db3905387033a2 WatchSource:0}: Error finding container 1cdc28e7b2405b1f6517001e9307b5d90310db683239a4df44db3905387033a2: Status 404 returned error can't find the container with id 1cdc28e7b2405b1f6517001e9307b5d90310db683239a4df44db3905387033a2 Sep 30 10:06:33 crc kubenswrapper[4730]: I0930 10:06:33.323757 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lkj9v\" (UniqueName: \"kubernetes.io/projected/38b9a04a-3f28-4fe7-8529-38195af0c035-kube-api-access-lkj9v\") pod \"ovn-controller-4tlvb-config-xpcw2\" (UID: \"38b9a04a-3f28-4fe7-8529-38195af0c035\") " pod="openstack/ovn-controller-4tlvb-config-xpcw2" Sep 30 10:06:33 crc kubenswrapper[4730]: I0930 10:06:33.393903 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-4tlvb-config-xpcw2" Sep 30 10:06:33 crc kubenswrapper[4730]: I0930 10:06:33.422451 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"6cf0ebea-06fc-47b2-a2c6-95605e023f94","Type":"ContainerStarted","Data":"d5c17e01b1b9d8c9200a2d1eacf6a349ed0515db67e3b0da61dfe2991655125e"} Sep 30 10:06:33 crc kubenswrapper[4730]: I0930 10:06:33.422691 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Sep 30 10:06:33 crc kubenswrapper[4730]: I0930 10:06:33.429136 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-2c9c-account-create-9hs8k" event={"ID":"801c2fbd-0d18-4528-9baf-8ace34906cfa","Type":"ContainerStarted","Data":"1cdc28e7b2405b1f6517001e9307b5d90310db683239a4df44db3905387033a2"} Sep 30 10:06:33 crc kubenswrapper[4730]: I0930 10:06:33.463051 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=50.460570494 podStartE2EDuration="56.463025671s" podCreationTimestamp="2025-09-30 10:05:37 +0000 UTC" firstStartedPulling="2025-09-30 10:05:52.71345052 +0000 UTC m=+997.046710513" lastFinishedPulling="2025-09-30 10:05:58.715905697 +0000 UTC m=+1003.049165690" observedRunningTime="2025-09-30 10:06:33.448377038 +0000 UTC m=+1037.781637051" watchObservedRunningTime="2025-09-30 10:06:33.463025671 +0000 UTC m=+1037.796285664" Sep 30 10:06:33 crc kubenswrapper[4730]: I0930 10:06:33.629496 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-9b71-account-create-5mbkm"] Sep 30 10:06:33 crc kubenswrapper[4730]: I0930 10:06:33.890464 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-4tlvb-config-xpcw2"] Sep 30 10:06:33 crc kubenswrapper[4730]: W0930 10:06:33.895477 4730 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod38b9a04a_3f28_4fe7_8529_38195af0c035.slice/crio-01b242913f847e135308064c78eb6f39b5f794a6e2b84b8ebc2ca64fffb25f9c WatchSource:0}: Error finding container 01b242913f847e135308064c78eb6f39b5f794a6e2b84b8ebc2ca64fffb25f9c: Status 404 returned error can't find the container with id 01b242913f847e135308064c78eb6f39b5f794a6e2b84b8ebc2ca64fffb25f9c Sep 30 10:06:34 crc kubenswrapper[4730]: I0930 10:06:34.232508 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/prometheus-metric-storage-0"] Sep 30 10:06:34 crc kubenswrapper[4730]: I0930 10:06:34.232765 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/prometheus-metric-storage-0" podUID="57094072-3915-41c8-a4f8-35960aa068c4" containerName="prometheus" containerID="cri-o://63d2119ce9e953be65ebdea7c0fa72d610b8a3f1e4501388f3b01bf0e72ced4e" gracePeriod=600 Sep 30 10:06:34 crc kubenswrapper[4730]: I0930 10:06:34.233105 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/prometheus-metric-storage-0" podUID="57094072-3915-41c8-a4f8-35960aa068c4" containerName="thanos-sidecar" containerID="cri-o://e91a8f65260ecf780628fab713342fbc315724569c915ec8474d885a955c9712" gracePeriod=600 Sep 30 10:06:34 crc kubenswrapper[4730]: I0930 10:06:34.233152 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/prometheus-metric-storage-0" podUID="57094072-3915-41c8-a4f8-35960aa068c4" containerName="config-reloader" 
containerID="cri-o://4bfdc5dd0f0ab29d41265792bd623a5a786bfa8ef8df4dcd2fb6a22f2f27262d" gracePeriod=600 Sep 30 10:06:34 crc kubenswrapper[4730]: I0930 10:06:34.338958 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/watcher-f3dc-account-create-ncmmt"] Sep 30 10:06:34 crc kubenswrapper[4730]: I0930 10:06:34.340345 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-f3dc-account-create-ncmmt" Sep 30 10:06:34 crc kubenswrapper[4730]: I0930 10:06:34.348946 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"watcher-db-secret" Sep 30 10:06:34 crc kubenswrapper[4730]: I0930 10:06:34.362766 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-f3dc-account-create-ncmmt"] Sep 30 10:06:34 crc kubenswrapper[4730]: I0930 10:06:34.422150 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2q4mh\" (UniqueName: \"kubernetes.io/projected/835855e8-ece2-4510-848b-f2851ba7aaa4-kube-api-access-2q4mh\") pod \"watcher-f3dc-account-create-ncmmt\" (UID: \"835855e8-ece2-4510-848b-f2851ba7aaa4\") " pod="openstack/watcher-f3dc-account-create-ncmmt" Sep 30 10:06:34 crc kubenswrapper[4730]: I0930 10:06:34.437334 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-9b71-account-create-5mbkm" event={"ID":"c38c27b7-08c7-4d1e-b151-3b092180bfc9","Type":"ContainerStarted","Data":"a00965f2392107d825f4328dad53d8980edd829bfcde0ba1425b4b71c269de8f"} Sep 30 10:06:34 crc kubenswrapper[4730]: I0930 10:06:34.438837 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-4tlvb-config-xpcw2" event={"ID":"38b9a04a-3f28-4fe7-8529-38195af0c035","Type":"ContainerStarted","Data":"01b242913f847e135308064c78eb6f39b5f794a6e2b84b8ebc2ca64fffb25f9c"} Sep 30 10:06:34 crc kubenswrapper[4730]: I0930 10:06:34.523828 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2q4mh\" (UniqueName: \"kubernetes.io/projected/835855e8-ece2-4510-848b-f2851ba7aaa4-kube-api-access-2q4mh\") pod \"watcher-f3dc-account-create-ncmmt\" (UID: \"835855e8-ece2-4510-848b-f2851ba7aaa4\") " pod="openstack/watcher-f3dc-account-create-ncmmt" Sep 30 10:06:34 crc kubenswrapper[4730]: I0930 10:06:34.550993 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2q4mh\" (UniqueName: \"kubernetes.io/projected/835855e8-ece2-4510-848b-f2851ba7aaa4-kube-api-access-2q4mh\") pod \"watcher-f3dc-account-create-ncmmt\" (UID: \"835855e8-ece2-4510-848b-f2851ba7aaa4\") " pod="openstack/watcher-f3dc-account-create-ncmmt" Sep 30 10:06:34 crc kubenswrapper[4730]: I0930 10:06:34.670503 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/watcher-f3dc-account-create-ncmmt" Sep 30 10:06:35 crc kubenswrapper[4730]: I0930 10:06:35.450915 4730 generic.go:334] "Generic (PLEG): container finished" podID="57094072-3915-41c8-a4f8-35960aa068c4" containerID="e91a8f65260ecf780628fab713342fbc315724569c915ec8474d885a955c9712" exitCode=0 Sep 30 10:06:35 crc kubenswrapper[4730]: I0930 10:06:35.451288 4730 generic.go:334] "Generic (PLEG): container finished" podID="57094072-3915-41c8-a4f8-35960aa068c4" containerID="4bfdc5dd0f0ab29d41265792bd623a5a786bfa8ef8df4dcd2fb6a22f2f27262d" exitCode=0 Sep 30 10:06:35 crc kubenswrapper[4730]: I0930 10:06:35.451299 4730 generic.go:334] "Generic (PLEG): container finished" podID="57094072-3915-41c8-a4f8-35960aa068c4" containerID="63d2119ce9e953be65ebdea7c0fa72d610b8a3f1e4501388f3b01bf0e72ced4e" exitCode=0 Sep 30 10:06:35 crc kubenswrapper[4730]: I0930 10:06:35.451070 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"57094072-3915-41c8-a4f8-35960aa068c4","Type":"ContainerDied","Data":"e91a8f65260ecf780628fab713342fbc315724569c915ec8474d885a955c9712"} Sep 30 10:06:35 crc kubenswrapper[4730]: I0930 10:06:35.451395 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"57094072-3915-41c8-a4f8-35960aa068c4","Type":"ContainerDied","Data":"4bfdc5dd0f0ab29d41265792bd623a5a786bfa8ef8df4dcd2fb6a22f2f27262d"} Sep 30 10:06:35 crc kubenswrapper[4730]: I0930 10:06:35.451423 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"57094072-3915-41c8-a4f8-35960aa068c4","Type":"ContainerDied","Data":"63d2119ce9e953be65ebdea7c0fa72d610b8a3f1e4501388f3b01bf0e72ced4e"} Sep 30 10:06:35 crc kubenswrapper[4730]: I0930 10:06:35.453503 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-2c9c-account-create-9hs8k" event={"ID":"801c2fbd-0d18-4528-9baf-8ace34906cfa","Type":"ContainerStarted","Data":"8109803dfa723493b6d947432a8c0eece2256f69bc8f851b33750e5b32df299e"} Sep 30 10:06:35 crc kubenswrapper[4730]: I0930 10:06:35.527981 4730 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/prometheus-metric-storage-0" podUID="57094072-3915-41c8-a4f8-35960aa068c4" containerName="prometheus" probeResult="failure" output="Get \"http://10.217.0.115:9090/-/ready\": dial tcp 10.217.0.115:9090: connect: connection refused" Sep 30 10:06:35 crc kubenswrapper[4730]: I0930 10:06:35.645476 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-f3dc-account-create-ncmmt"] Sep 30 10:06:35 crc kubenswrapper[4730]: W0930 10:06:35.649485 4730 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod835855e8_ece2_4510_848b_f2851ba7aaa4.slice/crio-e3c282b1d57e8f9ee0a6d6263d2ad1933167d29d3bc61f592bacefee75e8b3af WatchSource:0}: Error finding container e3c282b1d57e8f9ee0a6d6263d2ad1933167d29d3bc61f592bacefee75e8b3af: Status 404 returned error can't find the container with id e3c282b1d57e8f9ee0a6d6263d2ad1933167d29d3bc61f592bacefee75e8b3af Sep 30 10:06:36 crc kubenswrapper[4730]: I0930 10:06:36.462494 4730 generic.go:334] "Generic (PLEG): container finished" podID="835855e8-ece2-4510-848b-f2851ba7aaa4" containerID="2c2d89fa4ee358a5640c900e249c0beb20e2d16b9064e459053bc3f9233d4044" exitCode=0 Sep 30 10:06:36 crc kubenswrapper[4730]: I0930 10:06:36.462594 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/watcher-f3dc-account-create-ncmmt" event={"ID":"835855e8-ece2-4510-848b-f2851ba7aaa4","Type":"ContainerDied","Data":"2c2d89fa4ee358a5640c900e249c0beb20e2d16b9064e459053bc3f9233d4044"} Sep 30 10:06:36 crc kubenswrapper[4730]: I0930 10:06:36.463060 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-f3dc-account-create-ncmmt" event={"ID":"835855e8-ece2-4510-848b-f2851ba7aaa4","Type":"ContainerStarted","Data":"e3c282b1d57e8f9ee0a6d6263d2ad1933167d29d3bc61f592bacefee75e8b3af"} Sep 30 10:06:36 crc kubenswrapper[4730]: I0930 10:06:36.467170 4730 generic.go:334] "Generic (PLEG): container finished" podID="801c2fbd-0d18-4528-9baf-8ace34906cfa" containerID="8109803dfa723493b6d947432a8c0eece2256f69bc8f851b33750e5b32df299e" exitCode=0 Sep 30 10:06:36 crc kubenswrapper[4730]: I0930 10:06:36.467251 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-2c9c-account-create-9hs8k" event={"ID":"801c2fbd-0d18-4528-9baf-8ace34906cfa","Type":"ContainerDied","Data":"8109803dfa723493b6d947432a8c0eece2256f69bc8f851b33750e5b32df299e"} Sep 30 10:06:36 crc kubenswrapper[4730]: I0930 10:06:36.468875 4730 generic.go:334] "Generic (PLEG): container finished" podID="c38c27b7-08c7-4d1e-b151-3b092180bfc9" containerID="7460154e675bd3f2277e724831914427d8b12f95ee175b1ab533adf3ab9dad47" exitCode=0 Sep 30 10:06:36 crc kubenswrapper[4730]: I0930 10:06:36.468966 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-9b71-account-create-5mbkm" event={"ID":"c38c27b7-08c7-4d1e-b151-3b092180bfc9","Type":"ContainerDied","Data":"7460154e675bd3f2277e724831914427d8b12f95ee175b1ab533adf3ab9dad47"} Sep 30 10:06:36 crc kubenswrapper[4730]: I0930 10:06:36.470520 4730 generic.go:334] "Generic (PLEG): container finished" podID="38b9a04a-3f28-4fe7-8529-38195af0c035" containerID="5d878091fad4cae748fb5870a38179b6869c36256849c1d8c0d335fdf3097d1e" exitCode=0 Sep 30 10:06:36 crc kubenswrapper[4730]: I0930 10:06:36.470572 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-4tlvb-config-xpcw2" event={"ID":"38b9a04a-3f28-4fe7-8529-38195af0c035","Type":"ContainerDied","Data":"5d878091fad4cae748fb5870a38179b6869c36256849c1d8c0d335fdf3097d1e"} Sep 30 10:06:36 crc kubenswrapper[4730]: I0930 10:06:36.683680 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/prometheus-metric-storage-0" Sep 30 10:06:36 crc kubenswrapper[4730]: I0930 10:06:36.758173 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/57094072-3915-41c8-a4f8-35960aa068c4-web-config\") pod \"57094072-3915-41c8-a4f8-35960aa068c4\" (UID: \"57094072-3915-41c8-a4f8-35960aa068c4\") " Sep 30 10:06:36 crc kubenswrapper[4730]: I0930 10:06:36.758235 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/57094072-3915-41c8-a4f8-35960aa068c4-tls-assets\") pod \"57094072-3915-41c8-a4f8-35960aa068c4\" (UID: \"57094072-3915-41c8-a4f8-35960aa068c4\") " Sep 30 10:06:36 crc kubenswrapper[4730]: I0930 10:06:36.758307 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/57094072-3915-41c8-a4f8-35960aa068c4-config-out\") pod \"57094072-3915-41c8-a4f8-35960aa068c4\" (UID: \"57094072-3915-41c8-a4f8-35960aa068c4\") " Sep 30 10:06:36 crc kubenswrapper[4730]: I0930 10:06:36.758386 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9f762\" (UniqueName: \"kubernetes.io/projected/57094072-3915-41c8-a4f8-35960aa068c4-kube-api-access-9f762\") pod \"57094072-3915-41c8-a4f8-35960aa068c4\" (UID: \"57094072-3915-41c8-a4f8-35960aa068c4\") " Sep 30 10:06:36 crc kubenswrapper[4730]: I0930 10:06:36.758528 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"prometheus-metric-storage-db\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-5d620c66-5a11-463c-a9e7-c12e856084b2\") pod \"57094072-3915-41c8-a4f8-35960aa068c4\" (UID: \"57094072-3915-41c8-a4f8-35960aa068c4\") " Sep 30 10:06:36 crc kubenswrapper[4730]: I0930 10:06:36.758559 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/57094072-3915-41c8-a4f8-35960aa068c4-prometheus-metric-storage-rulefiles-0\") pod \"57094072-3915-41c8-a4f8-35960aa068c4\" (UID: \"57094072-3915-41c8-a4f8-35960aa068c4\") " Sep 30 10:06:36 crc kubenswrapper[4730]: I0930 10:06:36.758758 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/57094072-3915-41c8-a4f8-35960aa068c4-config\") pod \"57094072-3915-41c8-a4f8-35960aa068c4\" (UID: \"57094072-3915-41c8-a4f8-35960aa068c4\") " Sep 30 10:06:36 crc kubenswrapper[4730]: I0930 10:06:36.758814 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/57094072-3915-41c8-a4f8-35960aa068c4-thanos-prometheus-http-client-file\") pod \"57094072-3915-41c8-a4f8-35960aa068c4\" (UID: \"57094072-3915-41c8-a4f8-35960aa068c4\") " Sep 30 10:06:36 crc kubenswrapper[4730]: I0930 10:06:36.760661 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/57094072-3915-41c8-a4f8-35960aa068c4-prometheus-metric-storage-rulefiles-0" (OuterVolumeSpecName: "prometheus-metric-storage-rulefiles-0") pod "57094072-3915-41c8-a4f8-35960aa068c4" (UID: "57094072-3915-41c8-a4f8-35960aa068c4"). InnerVolumeSpecName "prometheus-metric-storage-rulefiles-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 10:06:36 crc kubenswrapper[4730]: I0930 10:06:36.765505 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57094072-3915-41c8-a4f8-35960aa068c4-kube-api-access-9f762" (OuterVolumeSpecName: "kube-api-access-9f762") pod "57094072-3915-41c8-a4f8-35960aa068c4" (UID: "57094072-3915-41c8-a4f8-35960aa068c4"). InnerVolumeSpecName "kube-api-access-9f762". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:06:36 crc kubenswrapper[4730]: I0930 10:06:36.766073 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/57094072-3915-41c8-a4f8-35960aa068c4-thanos-prometheus-http-client-file" (OuterVolumeSpecName: "thanos-prometheus-http-client-file") pod "57094072-3915-41c8-a4f8-35960aa068c4" (UID: "57094072-3915-41c8-a4f8-35960aa068c4"). InnerVolumeSpecName "thanos-prometheus-http-client-file". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:06:36 crc kubenswrapper[4730]: I0930 10:06:36.766749 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57094072-3915-41c8-a4f8-35960aa068c4-config-out" (OuterVolumeSpecName: "config-out") pod "57094072-3915-41c8-a4f8-35960aa068c4" (UID: "57094072-3915-41c8-a4f8-35960aa068c4"). InnerVolumeSpecName "config-out". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 10:06:36 crc kubenswrapper[4730]: I0930 10:06:36.766839 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57094072-3915-41c8-a4f8-35960aa068c4-tls-assets" (OuterVolumeSpecName: "tls-assets") pod "57094072-3915-41c8-a4f8-35960aa068c4" (UID: "57094072-3915-41c8-a4f8-35960aa068c4"). InnerVolumeSpecName "tls-assets". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:06:36 crc kubenswrapper[4730]: I0930 10:06:36.769288 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/57094072-3915-41c8-a4f8-35960aa068c4-config" (OuterVolumeSpecName: "config") pod "57094072-3915-41c8-a4f8-35960aa068c4" (UID: "57094072-3915-41c8-a4f8-35960aa068c4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:06:36 crc kubenswrapper[4730]: I0930 10:06:36.780497 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-5d620c66-5a11-463c-a9e7-c12e856084b2" (OuterVolumeSpecName: "prometheus-metric-storage-db") pod "57094072-3915-41c8-a4f8-35960aa068c4" (UID: "57094072-3915-41c8-a4f8-35960aa068c4"). InnerVolumeSpecName "pvc-5d620c66-5a11-463c-a9e7-c12e856084b2". PluginName "kubernetes.io/csi", VolumeGidValue "" Sep 30 10:06:36 crc kubenswrapper[4730]: I0930 10:06:36.789593 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/57094072-3915-41c8-a4f8-35960aa068c4-web-config" (OuterVolumeSpecName: "web-config") pod "57094072-3915-41c8-a4f8-35960aa068c4" (UID: "57094072-3915-41c8-a4f8-35960aa068c4"). InnerVolumeSpecName "web-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:06:36 crc kubenswrapper[4730]: I0930 10:06:36.860251 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9f762\" (UniqueName: \"kubernetes.io/projected/57094072-3915-41c8-a4f8-35960aa068c4-kube-api-access-9f762\") on node \"crc\" DevicePath \"\"" Sep 30 10:06:36 crc kubenswrapper[4730]: I0930 10:06:36.860501 4730 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-5d620c66-5a11-463c-a9e7-c12e856084b2\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-5d620c66-5a11-463c-a9e7-c12e856084b2\") on node \"crc\" " Sep 30 10:06:36 crc kubenswrapper[4730]: I0930 10:06:36.860858 4730 reconciler_common.go:293] "Volume detached for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/57094072-3915-41c8-a4f8-35960aa068c4-prometheus-metric-storage-rulefiles-0\") on node \"crc\" DevicePath \"\"" Sep 30 10:06:36 crc kubenswrapper[4730]: I0930 10:06:36.860968 4730 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/57094072-3915-41c8-a4f8-35960aa068c4-config\") on node \"crc\" DevicePath \"\"" Sep 30 10:06:36 crc kubenswrapper[4730]: I0930 10:06:36.861029 4730 reconciler_common.go:293] "Volume detached for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/57094072-3915-41c8-a4f8-35960aa068c4-thanos-prometheus-http-client-file\") on node \"crc\" DevicePath \"\"" Sep 30 10:06:36 crc kubenswrapper[4730]: I0930 10:06:36.861093 4730 reconciler_common.go:293] "Volume detached for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/57094072-3915-41c8-a4f8-35960aa068c4-web-config\") on node \"crc\" DevicePath \"\"" Sep 30 10:06:36 crc kubenswrapper[4730]: I0930 10:06:36.861147 4730 reconciler_common.go:293] "Volume detached for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/57094072-3915-41c8-a4f8-35960aa068c4-tls-assets\") on node \"crc\" DevicePath \"\"" Sep 30 10:06:36 crc kubenswrapper[4730]: I0930 10:06:36.861327 4730 reconciler_common.go:293] "Volume detached for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/57094072-3915-41c8-a4f8-35960aa068c4-config-out\") on node \"crc\" DevicePath \"\"" Sep 30 10:06:36 crc kubenswrapper[4730]: I0930 10:06:36.887408 4730 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... Sep 30 10:06:36 crc kubenswrapper[4730]: I0930 10:06:36.887681 4730 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-5d620c66-5a11-463c-a9e7-c12e856084b2" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-5d620c66-5a11-463c-a9e7-c12e856084b2") on node "crc" Sep 30 10:06:36 crc kubenswrapper[4730]: I0930 10:06:36.962820 4730 reconciler_common.go:293] "Volume detached for volume \"pvc-5d620c66-5a11-463c-a9e7-c12e856084b2\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-5d620c66-5a11-463c-a9e7-c12e856084b2\") on node \"crc\" DevicePath \"\"" Sep 30 10:06:37 crc kubenswrapper[4730]: I0930 10:06:37.487560 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/prometheus-metric-storage-0" Sep 30 10:06:37 crc kubenswrapper[4730]: I0930 10:06:37.494819 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"57094072-3915-41c8-a4f8-35960aa068c4","Type":"ContainerDied","Data":"a0165cb59e49af8f0985cbb559260514f3b1f8868cf3e8d151e84890a69ddc40"} Sep 30 10:06:37 crc kubenswrapper[4730]: I0930 10:06:37.494888 4730 scope.go:117] "RemoveContainer" containerID="e91a8f65260ecf780628fab713342fbc315724569c915ec8474d885a955c9712" Sep 30 10:06:37 crc kubenswrapper[4730]: I0930 10:06:37.553303 4730 scope.go:117] "RemoveContainer" containerID="4bfdc5dd0f0ab29d41265792bd623a5a786bfa8ef8df4dcd2fb6a22f2f27262d" Sep 30 10:06:37 crc kubenswrapper[4730]: I0930 10:06:37.575879 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/prometheus-metric-storage-0"] Sep 30 10:06:37 crc kubenswrapper[4730]: I0930 10:06:37.591101 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/prometheus-metric-storage-0"] Sep 30 10:06:37 crc kubenswrapper[4730]: I0930 10:06:37.595822 4730 scope.go:117] "RemoveContainer" containerID="63d2119ce9e953be65ebdea7c0fa72d610b8a3f1e4501388f3b01bf0e72ced4e" Sep 30 10:06:37 crc kubenswrapper[4730]: I0930 10:06:37.600714 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/prometheus-metric-storage-0"] Sep 30 10:06:37 crc kubenswrapper[4730]: E0930 10:06:37.601153 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="57094072-3915-41c8-a4f8-35960aa068c4" containerName="config-reloader" Sep 30 10:06:37 crc kubenswrapper[4730]: I0930 10:06:37.601171 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="57094072-3915-41c8-a4f8-35960aa068c4" containerName="config-reloader" Sep 30 10:06:37 crc kubenswrapper[4730]: E0930 10:06:37.601200 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="57094072-3915-41c8-a4f8-35960aa068c4" containerName="init-config-reloader" Sep 30 10:06:37 crc kubenswrapper[4730]: I0930 10:06:37.601210 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="57094072-3915-41c8-a4f8-35960aa068c4" containerName="init-config-reloader" Sep 30 10:06:37 crc kubenswrapper[4730]: E0930 10:06:37.601225 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="57094072-3915-41c8-a4f8-35960aa068c4" containerName="thanos-sidecar" Sep 30 10:06:37 crc kubenswrapper[4730]: I0930 10:06:37.601233 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="57094072-3915-41c8-a4f8-35960aa068c4" containerName="thanos-sidecar" Sep 30 10:06:37 crc kubenswrapper[4730]: E0930 10:06:37.601250 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="57094072-3915-41c8-a4f8-35960aa068c4" containerName="prometheus" Sep 30 10:06:37 crc kubenswrapper[4730]: I0930 10:06:37.601258 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="57094072-3915-41c8-a4f8-35960aa068c4" containerName="prometheus" Sep 30 10:06:37 crc kubenswrapper[4730]: I0930 10:06:37.601454 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="57094072-3915-41c8-a4f8-35960aa068c4" containerName="config-reloader" Sep 30 10:06:37 crc kubenswrapper[4730]: I0930 10:06:37.601477 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="57094072-3915-41c8-a4f8-35960aa068c4" containerName="thanos-sidecar" Sep 30 10:06:37 crc kubenswrapper[4730]: I0930 10:06:37.601494 4730 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="57094072-3915-41c8-a4f8-35960aa068c4" containerName="prometheus" Sep 30 10:06:37 crc kubenswrapper[4730]: I0930 10:06:37.603585 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0" Sep 30 10:06:37 crc kubenswrapper[4730]: I0930 10:06:37.610221 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-web-config" Sep 30 10:06:37 crc kubenswrapper[4730]: I0930 10:06:37.610412 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"prometheus-metric-storage-rulefiles-0" Sep 30 10:06:37 crc kubenswrapper[4730]: I0930 10:06:37.610503 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-thanos-prometheus-http-client-file" Sep 30 10:06:37 crc kubenswrapper[4730]: I0930 10:06:37.610733 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"metric-storage-prometheus-dockercfg-vs854" Sep 30 10:06:37 crc kubenswrapper[4730]: I0930 10:06:37.617711 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"] Sep 30 10:06:37 crc kubenswrapper[4730]: I0930 10:06:37.630825 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-metric-storage-prometheus-svc" Sep 30 10:06:37 crc kubenswrapper[4730]: I0930 10:06:37.630907 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-tls-assets-0" Sep 30 10:06:37 crc kubenswrapper[4730]: I0930 10:06:37.631094 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage" Sep 30 10:06:37 crc kubenswrapper[4730]: I0930 10:06:37.640125 4730 scope.go:117] "RemoveContainer" containerID="0c0f6b2933de5606710159b77ef3a6f54c31f39cc388a86d17f05d1fc154d3a7" Sep 30 10:06:37 crc kubenswrapper[4730]: I0930 10:06:37.676853 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/711be93d-c342-44a5-aac9-ace1d09682a0-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"711be93d-c342-44a5-aac9-ace1d09682a0\") " pod="openstack/prometheus-metric-storage-0" Sep 30 10:06:37 crc kubenswrapper[4730]: I0930 10:06:37.676910 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/711be93d-c342-44a5-aac9-ace1d09682a0-config\") pod \"prometheus-metric-storage-0\" (UID: \"711be93d-c342-44a5-aac9-ace1d09682a0\") " pod="openstack/prometheus-metric-storage-0" Sep 30 10:06:37 crc kubenswrapper[4730]: I0930 10:06:37.676961 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6dqhg\" (UniqueName: \"kubernetes.io/projected/711be93d-c342-44a5-aac9-ace1d09682a0-kube-api-access-6dqhg\") pod \"prometheus-metric-storage-0\" (UID: \"711be93d-c342-44a5-aac9-ace1d09682a0\") " pod="openstack/prometheus-metric-storage-0" Sep 30 10:06:37 crc kubenswrapper[4730]: I0930 10:06:37.677003 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/711be93d-c342-44a5-aac9-ace1d09682a0-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: 
\"711be93d-c342-44a5-aac9-ace1d09682a0\") " pod="openstack/prometheus-metric-storage-0" Sep 30 10:06:37 crc kubenswrapper[4730]: I0930 10:06:37.677035 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/711be93d-c342-44a5-aac9-ace1d09682a0-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"711be93d-c342-44a5-aac9-ace1d09682a0\") " pod="openstack/prometheus-metric-storage-0" Sep 30 10:06:37 crc kubenswrapper[4730]: I0930 10:06:37.677064 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/711be93d-c342-44a5-aac9-ace1d09682a0-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"711be93d-c342-44a5-aac9-ace1d09682a0\") " pod="openstack/prometheus-metric-storage-0" Sep 30 10:06:37 crc kubenswrapper[4730]: I0930 10:06:37.677110 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/711be93d-c342-44a5-aac9-ace1d09682a0-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"711be93d-c342-44a5-aac9-ace1d09682a0\") " pod="openstack/prometheus-metric-storage-0" Sep 30 10:06:37 crc kubenswrapper[4730]: I0930 10:06:37.677287 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/711be93d-c342-44a5-aac9-ace1d09682a0-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"711be93d-c342-44a5-aac9-ace1d09682a0\") " pod="openstack/prometheus-metric-storage-0" Sep 30 10:06:37 crc kubenswrapper[4730]: I0930 10:06:37.677338 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/711be93d-c342-44a5-aac9-ace1d09682a0-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"711be93d-c342-44a5-aac9-ace1d09682a0\") " pod="openstack/prometheus-metric-storage-0" Sep 30 10:06:37 crc kubenswrapper[4730]: I0930 10:06:37.677517 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/711be93d-c342-44a5-aac9-ace1d09682a0-secret-combined-ca-bundle\") pod \"prometheus-metric-storage-0\" (UID: \"711be93d-c342-44a5-aac9-ace1d09682a0\") " pod="openstack/prometheus-metric-storage-0" Sep 30 10:06:37 crc kubenswrapper[4730]: I0930 10:06:37.677601 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-5d620c66-5a11-463c-a9e7-c12e856084b2\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-5d620c66-5a11-463c-a9e7-c12e856084b2\") pod \"prometheus-metric-storage-0\" (UID: \"711be93d-c342-44a5-aac9-ace1d09682a0\") " pod="openstack/prometheus-metric-storage-0" Sep 30 10:06:37 crc kubenswrapper[4730]: I0930 10:06:37.766209 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-4tlvb" Sep 30 10:06:37 crc kubenswrapper[4730]: I0930 10:06:37.778206 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/711be93d-c342-44a5-aac9-ace1d09682a0-secret-combined-ca-bundle\") 
pod \"prometheus-metric-storage-0\" (UID: \"711be93d-c342-44a5-aac9-ace1d09682a0\") " pod="openstack/prometheus-metric-storage-0" Sep 30 10:06:37 crc kubenswrapper[4730]: I0930 10:06:37.778272 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-5d620c66-5a11-463c-a9e7-c12e856084b2\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-5d620c66-5a11-463c-a9e7-c12e856084b2\") pod \"prometheus-metric-storage-0\" (UID: \"711be93d-c342-44a5-aac9-ace1d09682a0\") " pod="openstack/prometheus-metric-storage-0" Sep 30 10:06:37 crc kubenswrapper[4730]: I0930 10:06:37.778319 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/711be93d-c342-44a5-aac9-ace1d09682a0-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"711be93d-c342-44a5-aac9-ace1d09682a0\") " pod="openstack/prometheus-metric-storage-0" Sep 30 10:06:37 crc kubenswrapper[4730]: I0930 10:06:37.778339 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/711be93d-c342-44a5-aac9-ace1d09682a0-config\") pod \"prometheus-metric-storage-0\" (UID: \"711be93d-c342-44a5-aac9-ace1d09682a0\") " pod="openstack/prometheus-metric-storage-0" Sep 30 10:06:37 crc kubenswrapper[4730]: I0930 10:06:37.778393 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6dqhg\" (UniqueName: \"kubernetes.io/projected/711be93d-c342-44a5-aac9-ace1d09682a0-kube-api-access-6dqhg\") pod \"prometheus-metric-storage-0\" (UID: \"711be93d-c342-44a5-aac9-ace1d09682a0\") " pod="openstack/prometheus-metric-storage-0" Sep 30 10:06:37 crc kubenswrapper[4730]: I0930 10:06:37.778435 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/711be93d-c342-44a5-aac9-ace1d09682a0-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"711be93d-c342-44a5-aac9-ace1d09682a0\") " pod="openstack/prometheus-metric-storage-0" Sep 30 10:06:37 crc kubenswrapper[4730]: I0930 10:06:37.778453 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/711be93d-c342-44a5-aac9-ace1d09682a0-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"711be93d-c342-44a5-aac9-ace1d09682a0\") " pod="openstack/prometheus-metric-storage-0" Sep 30 10:06:37 crc kubenswrapper[4730]: I0930 10:06:37.778482 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/711be93d-c342-44a5-aac9-ace1d09682a0-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"711be93d-c342-44a5-aac9-ace1d09682a0\") " pod="openstack/prometheus-metric-storage-0" Sep 30 10:06:37 crc kubenswrapper[4730]: I0930 10:06:37.778510 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/711be93d-c342-44a5-aac9-ace1d09682a0-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"711be93d-c342-44a5-aac9-ace1d09682a0\") " pod="openstack/prometheus-metric-storage-0" Sep 30 10:06:37 crc kubenswrapper[4730]: I0930 10:06:37.778540 4730 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/711be93d-c342-44a5-aac9-ace1d09682a0-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"711be93d-c342-44a5-aac9-ace1d09682a0\") " pod="openstack/prometheus-metric-storage-0" Sep 30 10:06:37 crc kubenswrapper[4730]: I0930 10:06:37.778557 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/711be93d-c342-44a5-aac9-ace1d09682a0-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"711be93d-c342-44a5-aac9-ace1d09682a0\") " pod="openstack/prometheus-metric-storage-0" Sep 30 10:06:37 crc kubenswrapper[4730]: I0930 10:06:37.787270 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/711be93d-c342-44a5-aac9-ace1d09682a0-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"711be93d-c342-44a5-aac9-ace1d09682a0\") " pod="openstack/prometheus-metric-storage-0" Sep 30 10:06:37 crc kubenswrapper[4730]: I0930 10:06:37.788178 4730 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Sep 30 10:06:37 crc kubenswrapper[4730]: I0930 10:06:37.788206 4730 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-5d620c66-5a11-463c-a9e7-c12e856084b2\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-5d620c66-5a11-463c-a9e7-c12e856084b2\") pod \"prometheus-metric-storage-0\" (UID: \"711be93d-c342-44a5-aac9-ace1d09682a0\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/cf539b04350c5d85f90d0468a4b3f4f72d24a709bb4a2121a25d26c6e8fc960c/globalmount\"" pod="openstack/prometheus-metric-storage-0" Sep 30 10:06:37 crc kubenswrapper[4730]: I0930 10:06:37.791237 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/711be93d-c342-44a5-aac9-ace1d09682a0-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"711be93d-c342-44a5-aac9-ace1d09682a0\") " pod="openstack/prometheus-metric-storage-0" Sep 30 10:06:37 crc kubenswrapper[4730]: I0930 10:06:37.791997 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/711be93d-c342-44a5-aac9-ace1d09682a0-secret-combined-ca-bundle\") pod \"prometheus-metric-storage-0\" (UID: \"711be93d-c342-44a5-aac9-ace1d09682a0\") " pod="openstack/prometheus-metric-storage-0" Sep 30 10:06:37 crc kubenswrapper[4730]: I0930 10:06:37.799692 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/711be93d-c342-44a5-aac9-ace1d09682a0-config\") pod \"prometheus-metric-storage-0\" (UID: \"711be93d-c342-44a5-aac9-ace1d09682a0\") " pod="openstack/prometheus-metric-storage-0" Sep 30 10:06:37 crc kubenswrapper[4730]: I0930 10:06:37.799969 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/711be93d-c342-44a5-aac9-ace1d09682a0-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"711be93d-c342-44a5-aac9-ace1d09682a0\") " pod="openstack/prometheus-metric-storage-0" Sep 30 
10:06:37 crc kubenswrapper[4730]: I0930 10:06:37.800476 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/711be93d-c342-44a5-aac9-ace1d09682a0-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"711be93d-c342-44a5-aac9-ace1d09682a0\") " pod="openstack/prometheus-metric-storage-0" Sep 30 10:06:37 crc kubenswrapper[4730]: I0930 10:06:37.801155 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6dqhg\" (UniqueName: \"kubernetes.io/projected/711be93d-c342-44a5-aac9-ace1d09682a0-kube-api-access-6dqhg\") pod \"prometheus-metric-storage-0\" (UID: \"711be93d-c342-44a5-aac9-ace1d09682a0\") " pod="openstack/prometheus-metric-storage-0" Sep 30 10:06:37 crc kubenswrapper[4730]: I0930 10:06:37.802401 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/711be93d-c342-44a5-aac9-ace1d09682a0-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"711be93d-c342-44a5-aac9-ace1d09682a0\") " pod="openstack/prometheus-metric-storage-0" Sep 30 10:06:37 crc kubenswrapper[4730]: I0930 10:06:37.809516 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/711be93d-c342-44a5-aac9-ace1d09682a0-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"711be93d-c342-44a5-aac9-ace1d09682a0\") " pod="openstack/prometheus-metric-storage-0" Sep 30 10:06:37 crc kubenswrapper[4730]: I0930 10:06:37.814113 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/711be93d-c342-44a5-aac9-ace1d09682a0-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"711be93d-c342-44a5-aac9-ace1d09682a0\") " pod="openstack/prometheus-metric-storage-0" Sep 30 10:06:37 crc kubenswrapper[4730]: I0930 10:06:37.827136 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-5d620c66-5a11-463c-a9e7-c12e856084b2\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-5d620c66-5a11-463c-a9e7-c12e856084b2\") pod \"prometheus-metric-storage-0\" (UID: \"711be93d-c342-44a5-aac9-ace1d09682a0\") " pod="openstack/prometheus-metric-storage-0" Sep 30 10:06:37 crc kubenswrapper[4730]: I0930 10:06:37.921054 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-4tlvb-config-xpcw2" Sep 30 10:06:37 crc kubenswrapper[4730]: I0930 10:06:37.944397 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/prometheus-metric-storage-0" Sep 30 10:06:37 crc kubenswrapper[4730]: I0930 10:06:37.983830 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/38b9a04a-3f28-4fe7-8529-38195af0c035-var-run\") pod \"38b9a04a-3f28-4fe7-8529-38195af0c035\" (UID: \"38b9a04a-3f28-4fe7-8529-38195af0c035\") " Sep 30 10:06:37 crc kubenswrapper[4730]: I0930 10:06:37.983889 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lkj9v\" (UniqueName: \"kubernetes.io/projected/38b9a04a-3f28-4fe7-8529-38195af0c035-kube-api-access-lkj9v\") pod \"38b9a04a-3f28-4fe7-8529-38195af0c035\" (UID: \"38b9a04a-3f28-4fe7-8529-38195af0c035\") " Sep 30 10:06:37 crc kubenswrapper[4730]: I0930 10:06:37.983943 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/38b9a04a-3f28-4fe7-8529-38195af0c035-var-log-ovn\") pod \"38b9a04a-3f28-4fe7-8529-38195af0c035\" (UID: \"38b9a04a-3f28-4fe7-8529-38195af0c035\") " Sep 30 10:06:37 crc kubenswrapper[4730]: I0930 10:06:37.983980 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/38b9a04a-3f28-4fe7-8529-38195af0c035-var-run-ovn\") pod \"38b9a04a-3f28-4fe7-8529-38195af0c035\" (UID: \"38b9a04a-3f28-4fe7-8529-38195af0c035\") " Sep 30 10:06:37 crc kubenswrapper[4730]: I0930 10:06:37.984033 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/38b9a04a-3f28-4fe7-8529-38195af0c035-scripts\") pod \"38b9a04a-3f28-4fe7-8529-38195af0c035\" (UID: \"38b9a04a-3f28-4fe7-8529-38195af0c035\") " Sep 30 10:06:37 crc kubenswrapper[4730]: I0930 10:06:37.984052 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/38b9a04a-3f28-4fe7-8529-38195af0c035-additional-scripts\") pod \"38b9a04a-3f28-4fe7-8529-38195af0c035\" (UID: \"38b9a04a-3f28-4fe7-8529-38195af0c035\") " Sep 30 10:06:37 crc kubenswrapper[4730]: I0930 10:06:37.984356 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/38b9a04a-3f28-4fe7-8529-38195af0c035-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "38b9a04a-3f28-4fe7-8529-38195af0c035" (UID: "38b9a04a-3f28-4fe7-8529-38195af0c035"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 10:06:37 crc kubenswrapper[4730]: I0930 10:06:37.984394 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/38b9a04a-3f28-4fe7-8529-38195af0c035-var-run" (OuterVolumeSpecName: "var-run") pod "38b9a04a-3f28-4fe7-8529-38195af0c035" (UID: "38b9a04a-3f28-4fe7-8529-38195af0c035"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 10:06:37 crc kubenswrapper[4730]: I0930 10:06:37.984887 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/38b9a04a-3f28-4fe7-8529-38195af0c035-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "38b9a04a-3f28-4fe7-8529-38195af0c035" (UID: "38b9a04a-3f28-4fe7-8529-38195af0c035"). InnerVolumeSpecName "var-run-ovn". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 10:06:37 crc kubenswrapper[4730]: I0930 10:06:37.985103 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/38b9a04a-3f28-4fe7-8529-38195af0c035-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "38b9a04a-3f28-4fe7-8529-38195af0c035" (UID: "38b9a04a-3f28-4fe7-8529-38195af0c035"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 10:06:37 crc kubenswrapper[4730]: I0930 10:06:37.986031 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/38b9a04a-3f28-4fe7-8529-38195af0c035-scripts" (OuterVolumeSpecName: "scripts") pod "38b9a04a-3f28-4fe7-8529-38195af0c035" (UID: "38b9a04a-3f28-4fe7-8529-38195af0c035"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 10:06:38 crc kubenswrapper[4730]: I0930 10:06:38.003843 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/38b9a04a-3f28-4fe7-8529-38195af0c035-kube-api-access-lkj9v" (OuterVolumeSpecName: "kube-api-access-lkj9v") pod "38b9a04a-3f28-4fe7-8529-38195af0c035" (UID: "38b9a04a-3f28-4fe7-8529-38195af0c035"). InnerVolumeSpecName "kube-api-access-lkj9v". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:06:38 crc kubenswrapper[4730]: I0930 10:06:38.023463 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-5c12-account-create-w4fmn"] Sep 30 10:06:38 crc kubenswrapper[4730]: E0930 10:06:38.023932 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="38b9a04a-3f28-4fe7-8529-38195af0c035" containerName="ovn-config" Sep 30 10:06:38 crc kubenswrapper[4730]: I0930 10:06:38.023957 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="38b9a04a-3f28-4fe7-8529-38195af0c035" containerName="ovn-config" Sep 30 10:06:38 crc kubenswrapper[4730]: I0930 10:06:38.024152 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="38b9a04a-3f28-4fe7-8529-38195af0c035" containerName="ovn-config" Sep 30 10:06:38 crc kubenswrapper[4730]: I0930 10:06:38.024911 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-5c12-account-create-w4fmn" Sep 30 10:06:38 crc kubenswrapper[4730]: I0930 10:06:38.034471 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret" Sep 30 10:06:38 crc kubenswrapper[4730]: I0930 10:06:38.035413 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-5c12-account-create-w4fmn"] Sep 30 10:06:38 crc kubenswrapper[4730]: I0930 10:06:38.085579 4730 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/38b9a04a-3f28-4fe7-8529-38195af0c035-var-log-ovn\") on node \"crc\" DevicePath \"\"" Sep 30 10:06:38 crc kubenswrapper[4730]: I0930 10:06:38.085603 4730 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/38b9a04a-3f28-4fe7-8529-38195af0c035-var-run-ovn\") on node \"crc\" DevicePath \"\"" Sep 30 10:06:38 crc kubenswrapper[4730]: I0930 10:06:38.085635 4730 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/38b9a04a-3f28-4fe7-8529-38195af0c035-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 10:06:38 crc kubenswrapper[4730]: I0930 10:06:38.085646 4730 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/38b9a04a-3f28-4fe7-8529-38195af0c035-additional-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 10:06:38 crc kubenswrapper[4730]: I0930 10:06:38.085655 4730 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/38b9a04a-3f28-4fe7-8529-38195af0c035-var-run\") on node \"crc\" DevicePath \"\"" Sep 30 10:06:38 crc kubenswrapper[4730]: I0930 10:06:38.085663 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lkj9v\" (UniqueName: \"kubernetes.io/projected/38b9a04a-3f28-4fe7-8529-38195af0c035-kube-api-access-lkj9v\") on node \"crc\" DevicePath \"\"" Sep 30 10:06:38 crc kubenswrapper[4730]: I0930 10:06:38.094042 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-2c9c-account-create-9hs8k" Sep 30 10:06:38 crc kubenswrapper[4730]: I0930 10:06:38.123485 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-f3dc-account-create-ncmmt" Sep 30 10:06:38 crc kubenswrapper[4730]: I0930 10:06:38.183542 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-9b71-account-create-5mbkm" Sep 30 10:06:38 crc kubenswrapper[4730]: I0930 10:06:38.186947 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8twlc\" (UniqueName: \"kubernetes.io/projected/1de6415b-c8dc-47eb-9ec3-aad2fa6ecc0a-kube-api-access-8twlc\") pod \"glance-5c12-account-create-w4fmn\" (UID: \"1de6415b-c8dc-47eb-9ec3-aad2fa6ecc0a\") " pod="openstack/glance-5c12-account-create-w4fmn" Sep 30 10:06:38 crc kubenswrapper[4730]: I0930 10:06:38.287898 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qsjx2\" (UniqueName: \"kubernetes.io/projected/c38c27b7-08c7-4d1e-b151-3b092180bfc9-kube-api-access-qsjx2\") pod \"c38c27b7-08c7-4d1e-b151-3b092180bfc9\" (UID: \"c38c27b7-08c7-4d1e-b151-3b092180bfc9\") " Sep 30 10:06:38 crc kubenswrapper[4730]: I0930 10:06:38.288028 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2q4mh\" (UniqueName: \"kubernetes.io/projected/835855e8-ece2-4510-848b-f2851ba7aaa4-kube-api-access-2q4mh\") pod \"835855e8-ece2-4510-848b-f2851ba7aaa4\" (UID: \"835855e8-ece2-4510-848b-f2851ba7aaa4\") " Sep 30 10:06:38 crc kubenswrapper[4730]: I0930 10:06:38.288174 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tlvnh\" (UniqueName: \"kubernetes.io/projected/801c2fbd-0d18-4528-9baf-8ace34906cfa-kube-api-access-tlvnh\") pod \"801c2fbd-0d18-4528-9baf-8ace34906cfa\" (UID: \"801c2fbd-0d18-4528-9baf-8ace34906cfa\") " Sep 30 10:06:38 crc kubenswrapper[4730]: I0930 10:06:38.288574 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8twlc\" (UniqueName: \"kubernetes.io/projected/1de6415b-c8dc-47eb-9ec3-aad2fa6ecc0a-kube-api-access-8twlc\") pod \"glance-5c12-account-create-w4fmn\" (UID: \"1de6415b-c8dc-47eb-9ec3-aad2fa6ecc0a\") " pod="openstack/glance-5c12-account-create-w4fmn" Sep 30 10:06:38 crc kubenswrapper[4730]: I0930 10:06:38.295076 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/801c2fbd-0d18-4528-9baf-8ace34906cfa-kube-api-access-tlvnh" (OuterVolumeSpecName: "kube-api-access-tlvnh") pod "801c2fbd-0d18-4528-9baf-8ace34906cfa" (UID: "801c2fbd-0d18-4528-9baf-8ace34906cfa"). InnerVolumeSpecName "kube-api-access-tlvnh". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:06:38 crc kubenswrapper[4730]: I0930 10:06:38.296037 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/835855e8-ece2-4510-848b-f2851ba7aaa4-kube-api-access-2q4mh" (OuterVolumeSpecName: "kube-api-access-2q4mh") pod "835855e8-ece2-4510-848b-f2851ba7aaa4" (UID: "835855e8-ece2-4510-848b-f2851ba7aaa4"). InnerVolumeSpecName "kube-api-access-2q4mh". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:06:38 crc kubenswrapper[4730]: I0930 10:06:38.297778 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c38c27b7-08c7-4d1e-b151-3b092180bfc9-kube-api-access-qsjx2" (OuterVolumeSpecName: "kube-api-access-qsjx2") pod "c38c27b7-08c7-4d1e-b151-3b092180bfc9" (UID: "c38c27b7-08c7-4d1e-b151-3b092180bfc9"). InnerVolumeSpecName "kube-api-access-qsjx2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:06:38 crc kubenswrapper[4730]: I0930 10:06:38.311585 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8twlc\" (UniqueName: \"kubernetes.io/projected/1de6415b-c8dc-47eb-9ec3-aad2fa6ecc0a-kube-api-access-8twlc\") pod \"glance-5c12-account-create-w4fmn\" (UID: \"1de6415b-c8dc-47eb-9ec3-aad2fa6ecc0a\") " pod="openstack/glance-5c12-account-create-w4fmn" Sep 30 10:06:38 crc kubenswrapper[4730]: I0930 10:06:38.349994 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-5c12-account-create-w4fmn" Sep 30 10:06:38 crc kubenswrapper[4730]: I0930 10:06:38.390062 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tlvnh\" (UniqueName: \"kubernetes.io/projected/801c2fbd-0d18-4528-9baf-8ace34906cfa-kube-api-access-tlvnh\") on node \"crc\" DevicePath \"\"" Sep 30 10:06:38 crc kubenswrapper[4730]: I0930 10:06:38.390098 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qsjx2\" (UniqueName: \"kubernetes.io/projected/c38c27b7-08c7-4d1e-b151-3b092180bfc9-kube-api-access-qsjx2\") on node \"crc\" DevicePath \"\"" Sep 30 10:06:38 crc kubenswrapper[4730]: I0930 10:06:38.390111 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2q4mh\" (UniqueName: \"kubernetes.io/projected/835855e8-ece2-4510-848b-f2851ba7aaa4-kube-api-access-2q4mh\") on node \"crc\" DevicePath \"\"" Sep 30 10:06:38 crc kubenswrapper[4730]: I0930 10:06:38.394193 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57094072-3915-41c8-a4f8-35960aa068c4" path="/var/lib/kubelet/pods/57094072-3915-41c8-a4f8-35960aa068c4/volumes" Sep 30 10:06:38 crc kubenswrapper[4730]: I0930 10:06:38.502560 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-2c9c-account-create-9hs8k" event={"ID":"801c2fbd-0d18-4528-9baf-8ace34906cfa","Type":"ContainerDied","Data":"1cdc28e7b2405b1f6517001e9307b5d90310db683239a4df44db3905387033a2"} Sep 30 10:06:38 crc kubenswrapper[4730]: I0930 10:06:38.503056 4730 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1cdc28e7b2405b1f6517001e9307b5d90310db683239a4df44db3905387033a2" Sep 30 10:06:38 crc kubenswrapper[4730]: I0930 10:06:38.502773 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-2c9c-account-create-9hs8k" Sep 30 10:06:38 crc kubenswrapper[4730]: I0930 10:06:38.504989 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-9b71-account-create-5mbkm" event={"ID":"c38c27b7-08c7-4d1e-b151-3b092180bfc9","Type":"ContainerDied","Data":"a00965f2392107d825f4328dad53d8980edd829bfcde0ba1425b4b71c269de8f"} Sep 30 10:06:38 crc kubenswrapper[4730]: I0930 10:06:38.505036 4730 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a00965f2392107d825f4328dad53d8980edd829bfcde0ba1425b4b71c269de8f" Sep 30 10:06:38 crc kubenswrapper[4730]: I0930 10:06:38.505258 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-9b71-account-create-5mbkm" Sep 30 10:06:38 crc kubenswrapper[4730]: I0930 10:06:38.508317 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-4tlvb-config-xpcw2" Sep 30 10:06:38 crc kubenswrapper[4730]: I0930 10:06:38.508640 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-4tlvb-config-xpcw2" event={"ID":"38b9a04a-3f28-4fe7-8529-38195af0c035","Type":"ContainerDied","Data":"01b242913f847e135308064c78eb6f39b5f794a6e2b84b8ebc2ca64fffb25f9c"} Sep 30 10:06:38 crc kubenswrapper[4730]: I0930 10:06:38.508690 4730 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="01b242913f847e135308064c78eb6f39b5f794a6e2b84b8ebc2ca64fffb25f9c" Sep 30 10:06:38 crc kubenswrapper[4730]: I0930 10:06:38.513341 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-f3dc-account-create-ncmmt" event={"ID":"835855e8-ece2-4510-848b-f2851ba7aaa4","Type":"ContainerDied","Data":"e3c282b1d57e8f9ee0a6d6263d2ad1933167d29d3bc61f592bacefee75e8b3af"} Sep 30 10:06:38 crc kubenswrapper[4730]: I0930 10:06:38.513384 4730 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e3c282b1d57e8f9ee0a6d6263d2ad1933167d29d3bc61f592bacefee75e8b3af" Sep 30 10:06:38 crc kubenswrapper[4730]: I0930 10:06:38.513388 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-f3dc-account-create-ncmmt" Sep 30 10:06:38 crc kubenswrapper[4730]: I0930 10:06:38.568450 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"] Sep 30 10:06:38 crc kubenswrapper[4730]: W0930 10:06:38.570882 4730 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod711be93d_c342_44a5_aac9_ace1d09682a0.slice/crio-be460ee00e5e6131357dc0d1b42ce99d4d9caee73f3d5512358485f4db656511 WatchSource:0}: Error finding container be460ee00e5e6131357dc0d1b42ce99d4d9caee73f3d5512358485f4db656511: Status 404 returned error can't find the container with id be460ee00e5e6131357dc0d1b42ce99d4d9caee73f3d5512358485f4db656511 Sep 30 10:06:38 crc kubenswrapper[4730]: I0930 10:06:38.830713 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-5c12-account-create-w4fmn"] Sep 30 10:06:38 crc kubenswrapper[4730]: W0930 10:06:38.833385 4730 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1de6415b_c8dc_47eb_9ec3_aad2fa6ecc0a.slice/crio-8a6d2a28dd12ca39001d991e79ec55815b824adea384ffe18dfe544d9c52418a WatchSource:0}: Error finding container 8a6d2a28dd12ca39001d991e79ec55815b824adea384ffe18dfe544d9c52418a: Status 404 returned error can't find the container with id 8a6d2a28dd12ca39001d991e79ec55815b824adea384ffe18dfe544d9c52418a Sep 30 10:06:39 crc kubenswrapper[4730]: I0930 10:06:39.031236 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-4tlvb-config-xpcw2"] Sep 30 10:06:39 crc kubenswrapper[4730]: I0930 10:06:39.038891 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-4tlvb-config-xpcw2"] Sep 30 10:06:39 crc kubenswrapper[4730]: I0930 10:06:39.131511 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-4tlvb-config-wckbn"] Sep 30 10:06:39 crc kubenswrapper[4730]: E0930 10:06:39.131930 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="801c2fbd-0d18-4528-9baf-8ace34906cfa" containerName="mariadb-account-create" Sep 30 10:06:39 crc kubenswrapper[4730]: I0930 10:06:39.131955 4730 state_mem.go:107] 
"Deleted CPUSet assignment" podUID="801c2fbd-0d18-4528-9baf-8ace34906cfa" containerName="mariadb-account-create" Sep 30 10:06:39 crc kubenswrapper[4730]: E0930 10:06:39.131992 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="835855e8-ece2-4510-848b-f2851ba7aaa4" containerName="mariadb-account-create" Sep 30 10:06:39 crc kubenswrapper[4730]: I0930 10:06:39.132001 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="835855e8-ece2-4510-848b-f2851ba7aaa4" containerName="mariadb-account-create" Sep 30 10:06:39 crc kubenswrapper[4730]: E0930 10:06:39.132014 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c38c27b7-08c7-4d1e-b151-3b092180bfc9" containerName="mariadb-account-create" Sep 30 10:06:39 crc kubenswrapper[4730]: I0930 10:06:39.132023 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="c38c27b7-08c7-4d1e-b151-3b092180bfc9" containerName="mariadb-account-create" Sep 30 10:06:39 crc kubenswrapper[4730]: I0930 10:06:39.136561 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="835855e8-ece2-4510-848b-f2851ba7aaa4" containerName="mariadb-account-create" Sep 30 10:06:39 crc kubenswrapper[4730]: I0930 10:06:39.136636 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="c38c27b7-08c7-4d1e-b151-3b092180bfc9" containerName="mariadb-account-create" Sep 30 10:06:39 crc kubenswrapper[4730]: I0930 10:06:39.136682 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="801c2fbd-0d18-4528-9baf-8ace34906cfa" containerName="mariadb-account-create" Sep 30 10:06:39 crc kubenswrapper[4730]: I0930 10:06:39.137785 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-4tlvb-config-wckbn" Sep 30 10:06:39 crc kubenswrapper[4730]: I0930 10:06:39.154385 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Sep 30 10:06:39 crc kubenswrapper[4730]: I0930 10:06:39.180163 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-4tlvb-config-wckbn"] Sep 30 10:06:39 crc kubenswrapper[4730]: I0930 10:06:39.306632 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xqwtc\" (UniqueName: \"kubernetes.io/projected/1413b585-d9dd-46fb-ae45-3e4737d13410-kube-api-access-xqwtc\") pod \"ovn-controller-4tlvb-config-wckbn\" (UID: \"1413b585-d9dd-46fb-ae45-3e4737d13410\") " pod="openstack/ovn-controller-4tlvb-config-wckbn" Sep 30 10:06:39 crc kubenswrapper[4730]: I0930 10:06:39.306776 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/1413b585-d9dd-46fb-ae45-3e4737d13410-var-log-ovn\") pod \"ovn-controller-4tlvb-config-wckbn\" (UID: \"1413b585-d9dd-46fb-ae45-3e4737d13410\") " pod="openstack/ovn-controller-4tlvb-config-wckbn" Sep 30 10:06:39 crc kubenswrapper[4730]: I0930 10:06:39.306845 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/1413b585-d9dd-46fb-ae45-3e4737d13410-additional-scripts\") pod \"ovn-controller-4tlvb-config-wckbn\" (UID: \"1413b585-d9dd-46fb-ae45-3e4737d13410\") " pod="openstack/ovn-controller-4tlvb-config-wckbn" Sep 30 10:06:39 crc kubenswrapper[4730]: I0930 10:06:39.306892 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" 
(UniqueName: \"kubernetes.io/configmap/1413b585-d9dd-46fb-ae45-3e4737d13410-scripts\") pod \"ovn-controller-4tlvb-config-wckbn\" (UID: \"1413b585-d9dd-46fb-ae45-3e4737d13410\") " pod="openstack/ovn-controller-4tlvb-config-wckbn" Sep 30 10:06:39 crc kubenswrapper[4730]: I0930 10:06:39.306970 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/1413b585-d9dd-46fb-ae45-3e4737d13410-var-run\") pod \"ovn-controller-4tlvb-config-wckbn\" (UID: \"1413b585-d9dd-46fb-ae45-3e4737d13410\") " pod="openstack/ovn-controller-4tlvb-config-wckbn" Sep 30 10:06:39 crc kubenswrapper[4730]: I0930 10:06:39.307059 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/1413b585-d9dd-46fb-ae45-3e4737d13410-var-run-ovn\") pod \"ovn-controller-4tlvb-config-wckbn\" (UID: \"1413b585-d9dd-46fb-ae45-3e4737d13410\") " pod="openstack/ovn-controller-4tlvb-config-wckbn" Sep 30 10:06:39 crc kubenswrapper[4730]: I0930 10:06:39.408543 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/1413b585-d9dd-46fb-ae45-3e4737d13410-var-run-ovn\") pod \"ovn-controller-4tlvb-config-wckbn\" (UID: \"1413b585-d9dd-46fb-ae45-3e4737d13410\") " pod="openstack/ovn-controller-4tlvb-config-wckbn" Sep 30 10:06:39 crc kubenswrapper[4730]: I0930 10:06:39.408738 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xqwtc\" (UniqueName: \"kubernetes.io/projected/1413b585-d9dd-46fb-ae45-3e4737d13410-kube-api-access-xqwtc\") pod \"ovn-controller-4tlvb-config-wckbn\" (UID: \"1413b585-d9dd-46fb-ae45-3e4737d13410\") " pod="openstack/ovn-controller-4tlvb-config-wckbn" Sep 30 10:06:39 crc kubenswrapper[4730]: I0930 10:06:39.408791 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/1413b585-d9dd-46fb-ae45-3e4737d13410-var-log-ovn\") pod \"ovn-controller-4tlvb-config-wckbn\" (UID: \"1413b585-d9dd-46fb-ae45-3e4737d13410\") " pod="openstack/ovn-controller-4tlvb-config-wckbn" Sep 30 10:06:39 crc kubenswrapper[4730]: I0930 10:06:39.408818 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/1413b585-d9dd-46fb-ae45-3e4737d13410-additional-scripts\") pod \"ovn-controller-4tlvb-config-wckbn\" (UID: \"1413b585-d9dd-46fb-ae45-3e4737d13410\") " pod="openstack/ovn-controller-4tlvb-config-wckbn" Sep 30 10:06:39 crc kubenswrapper[4730]: I0930 10:06:39.408838 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1413b585-d9dd-46fb-ae45-3e4737d13410-scripts\") pod \"ovn-controller-4tlvb-config-wckbn\" (UID: \"1413b585-d9dd-46fb-ae45-3e4737d13410\") " pod="openstack/ovn-controller-4tlvb-config-wckbn" Sep 30 10:06:39 crc kubenswrapper[4730]: I0930 10:06:39.408872 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/1413b585-d9dd-46fb-ae45-3e4737d13410-var-run\") pod \"ovn-controller-4tlvb-config-wckbn\" (UID: \"1413b585-d9dd-46fb-ae45-3e4737d13410\") " pod="openstack/ovn-controller-4tlvb-config-wckbn" Sep 30 10:06:39 crc kubenswrapper[4730]: I0930 10:06:39.408991 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/1413b585-d9dd-46fb-ae45-3e4737d13410-var-run\") pod \"ovn-controller-4tlvb-config-wckbn\" (UID: \"1413b585-d9dd-46fb-ae45-3e4737d13410\") " pod="openstack/ovn-controller-4tlvb-config-wckbn" Sep 30 10:06:39 crc kubenswrapper[4730]: I0930 10:06:39.409195 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/1413b585-d9dd-46fb-ae45-3e4737d13410-var-log-ovn\") pod \"ovn-controller-4tlvb-config-wckbn\" (UID: \"1413b585-d9dd-46fb-ae45-3e4737d13410\") " pod="openstack/ovn-controller-4tlvb-config-wckbn" Sep 30 10:06:39 crc kubenswrapper[4730]: I0930 10:06:39.409863 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/1413b585-d9dd-46fb-ae45-3e4737d13410-additional-scripts\") pod \"ovn-controller-4tlvb-config-wckbn\" (UID: \"1413b585-d9dd-46fb-ae45-3e4737d13410\") " pod="openstack/ovn-controller-4tlvb-config-wckbn" Sep 30 10:06:39 crc kubenswrapper[4730]: I0930 10:06:39.409948 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/1413b585-d9dd-46fb-ae45-3e4737d13410-var-run-ovn\") pod \"ovn-controller-4tlvb-config-wckbn\" (UID: \"1413b585-d9dd-46fb-ae45-3e4737d13410\") " pod="openstack/ovn-controller-4tlvb-config-wckbn" Sep 30 10:06:39 crc kubenswrapper[4730]: I0930 10:06:39.411102 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1413b585-d9dd-46fb-ae45-3e4737d13410-scripts\") pod \"ovn-controller-4tlvb-config-wckbn\" (UID: \"1413b585-d9dd-46fb-ae45-3e4737d13410\") " pod="openstack/ovn-controller-4tlvb-config-wckbn" Sep 30 10:06:39 crc kubenswrapper[4730]: I0930 10:06:39.443501 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xqwtc\" (UniqueName: \"kubernetes.io/projected/1413b585-d9dd-46fb-ae45-3e4737d13410-kube-api-access-xqwtc\") pod \"ovn-controller-4tlvb-config-wckbn\" (UID: \"1413b585-d9dd-46fb-ae45-3e4737d13410\") " pod="openstack/ovn-controller-4tlvb-config-wckbn" Sep 30 10:06:39 crc kubenswrapper[4730]: I0930 10:06:39.481772 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-4tlvb-config-wckbn" Sep 30 10:06:39 crc kubenswrapper[4730]: I0930 10:06:39.533021 4730 generic.go:334] "Generic (PLEG): container finished" podID="1de6415b-c8dc-47eb-9ec3-aad2fa6ecc0a" containerID="05867dce8da04c08197385d735995f18e092ab8c86e8025fabd04400ed987251" exitCode=0 Sep 30 10:06:39 crc kubenswrapper[4730]: I0930 10:06:39.533105 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-5c12-account-create-w4fmn" event={"ID":"1de6415b-c8dc-47eb-9ec3-aad2fa6ecc0a","Type":"ContainerDied","Data":"05867dce8da04c08197385d735995f18e092ab8c86e8025fabd04400ed987251"} Sep 30 10:06:39 crc kubenswrapper[4730]: I0930 10:06:39.533139 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-5c12-account-create-w4fmn" event={"ID":"1de6415b-c8dc-47eb-9ec3-aad2fa6ecc0a","Type":"ContainerStarted","Data":"8a6d2a28dd12ca39001d991e79ec55815b824adea384ffe18dfe544d9c52418a"} Sep 30 10:06:39 crc kubenswrapper[4730]: I0930 10:06:39.535130 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"711be93d-c342-44a5-aac9-ace1d09682a0","Type":"ContainerStarted","Data":"be460ee00e5e6131357dc0d1b42ce99d4d9caee73f3d5512358485f4db656511"} Sep 30 10:06:39 crc kubenswrapper[4730]: I0930 10:06:39.921509 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-4tlvb-config-wckbn"] Sep 30 10:06:40 crc kubenswrapper[4730]: I0930 10:06:40.395080 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="38b9a04a-3f28-4fe7-8529-38195af0c035" path="/var/lib/kubelet/pods/38b9a04a-3f28-4fe7-8529-38195af0c035/volumes" Sep 30 10:06:40 crc kubenswrapper[4730]: I0930 10:06:40.545254 4730 generic.go:334] "Generic (PLEG): container finished" podID="1413b585-d9dd-46fb-ae45-3e4737d13410" containerID="5dc6298b7f974539f124328135a3b32888083616c136c7cd162d72c85f31b1c9" exitCode=0 Sep 30 10:06:40 crc kubenswrapper[4730]: I0930 10:06:40.545359 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-4tlvb-config-wckbn" event={"ID":"1413b585-d9dd-46fb-ae45-3e4737d13410","Type":"ContainerDied","Data":"5dc6298b7f974539f124328135a3b32888083616c136c7cd162d72c85f31b1c9"} Sep 30 10:06:40 crc kubenswrapper[4730]: I0930 10:06:40.545405 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-4tlvb-config-wckbn" event={"ID":"1413b585-d9dd-46fb-ae45-3e4737d13410","Type":"ContainerStarted","Data":"37ca16afd6e0474e29f9074aa48bbaf993c3479715f266a7e88c5747ab98453e"} Sep 30 10:06:41 crc kubenswrapper[4730]: I0930 10:06:41.035733 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-5c12-account-create-w4fmn" Sep 30 10:06:41 crc kubenswrapper[4730]: I0930 10:06:41.140933 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8twlc\" (UniqueName: \"kubernetes.io/projected/1de6415b-c8dc-47eb-9ec3-aad2fa6ecc0a-kube-api-access-8twlc\") pod \"1de6415b-c8dc-47eb-9ec3-aad2fa6ecc0a\" (UID: \"1de6415b-c8dc-47eb-9ec3-aad2fa6ecc0a\") " Sep 30 10:06:41 crc kubenswrapper[4730]: I0930 10:06:41.165755 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1de6415b-c8dc-47eb-9ec3-aad2fa6ecc0a-kube-api-access-8twlc" (OuterVolumeSpecName: "kube-api-access-8twlc") pod "1de6415b-c8dc-47eb-9ec3-aad2fa6ecc0a" (UID: "1de6415b-c8dc-47eb-9ec3-aad2fa6ecc0a"). 
InnerVolumeSpecName "kube-api-access-8twlc". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:06:41 crc kubenswrapper[4730]: I0930 10:06:41.242883 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8twlc\" (UniqueName: \"kubernetes.io/projected/1de6415b-c8dc-47eb-9ec3-aad2fa6ecc0a-kube-api-access-8twlc\") on node \"crc\" DevicePath \"\"" Sep 30 10:06:41 crc kubenswrapper[4730]: I0930 10:06:41.557310 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-5c12-account-create-w4fmn" event={"ID":"1de6415b-c8dc-47eb-9ec3-aad2fa6ecc0a","Type":"ContainerDied","Data":"8a6d2a28dd12ca39001d991e79ec55815b824adea384ffe18dfe544d9c52418a"} Sep 30 10:06:41 crc kubenswrapper[4730]: I0930 10:06:41.557385 4730 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8a6d2a28dd12ca39001d991e79ec55815b824adea384ffe18dfe544d9c52418a" Sep 30 10:06:41 crc kubenswrapper[4730]: I0930 10:06:41.558066 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-5c12-account-create-w4fmn" Sep 30 10:06:41 crc kubenswrapper[4730]: I0930 10:06:41.559411 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"711be93d-c342-44a5-aac9-ace1d09682a0","Type":"ContainerStarted","Data":"b12066acb5d61484a5a6d994e6136a6fb5d60df22d47239fe02e68f5b58fdfaf"} Sep 30 10:06:41 crc kubenswrapper[4730]: I0930 10:06:41.981050 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-4tlvb-config-wckbn" Sep 30 10:06:42 crc kubenswrapper[4730]: I0930 10:06:42.156529 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/1413b585-d9dd-46fb-ae45-3e4737d13410-var-run-ovn\") pod \"1413b585-d9dd-46fb-ae45-3e4737d13410\" (UID: \"1413b585-d9dd-46fb-ae45-3e4737d13410\") " Sep 30 10:06:42 crc kubenswrapper[4730]: I0930 10:06:42.156595 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/1413b585-d9dd-46fb-ae45-3e4737d13410-var-log-ovn\") pod \"1413b585-d9dd-46fb-ae45-3e4737d13410\" (UID: \"1413b585-d9dd-46fb-ae45-3e4737d13410\") " Sep 30 10:06:42 crc kubenswrapper[4730]: I0930 10:06:42.156657 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/1413b585-d9dd-46fb-ae45-3e4737d13410-var-run\") pod \"1413b585-d9dd-46fb-ae45-3e4737d13410\" (UID: \"1413b585-d9dd-46fb-ae45-3e4737d13410\") " Sep 30 10:06:42 crc kubenswrapper[4730]: I0930 10:06:42.156695 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1413b585-d9dd-46fb-ae45-3e4737d13410-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "1413b585-d9dd-46fb-ae45-3e4737d13410" (UID: "1413b585-d9dd-46fb-ae45-3e4737d13410"). InnerVolumeSpecName "var-run-ovn". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 10:06:42 crc kubenswrapper[4730]: I0930 10:06:42.156735 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xqwtc\" (UniqueName: \"kubernetes.io/projected/1413b585-d9dd-46fb-ae45-3e4737d13410-kube-api-access-xqwtc\") pod \"1413b585-d9dd-46fb-ae45-3e4737d13410\" (UID: \"1413b585-d9dd-46fb-ae45-3e4737d13410\") " Sep 30 10:06:42 crc kubenswrapper[4730]: I0930 10:06:42.156826 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/1413b585-d9dd-46fb-ae45-3e4737d13410-additional-scripts\") pod \"1413b585-d9dd-46fb-ae45-3e4737d13410\" (UID: \"1413b585-d9dd-46fb-ae45-3e4737d13410\") " Sep 30 10:06:42 crc kubenswrapper[4730]: I0930 10:06:42.156742 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1413b585-d9dd-46fb-ae45-3e4737d13410-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "1413b585-d9dd-46fb-ae45-3e4737d13410" (UID: "1413b585-d9dd-46fb-ae45-3e4737d13410"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 10:06:42 crc kubenswrapper[4730]: I0930 10:06:42.156948 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1413b585-d9dd-46fb-ae45-3e4737d13410-scripts\") pod \"1413b585-d9dd-46fb-ae45-3e4737d13410\" (UID: \"1413b585-d9dd-46fb-ae45-3e4737d13410\") " Sep 30 10:06:42 crc kubenswrapper[4730]: I0930 10:06:42.156757 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1413b585-d9dd-46fb-ae45-3e4737d13410-var-run" (OuterVolumeSpecName: "var-run") pod "1413b585-d9dd-46fb-ae45-3e4737d13410" (UID: "1413b585-d9dd-46fb-ae45-3e4737d13410"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 10:06:42 crc kubenswrapper[4730]: I0930 10:06:42.157281 4730 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/1413b585-d9dd-46fb-ae45-3e4737d13410-var-run-ovn\") on node \"crc\" DevicePath \"\"" Sep 30 10:06:42 crc kubenswrapper[4730]: I0930 10:06:42.157293 4730 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/1413b585-d9dd-46fb-ae45-3e4737d13410-var-log-ovn\") on node \"crc\" DevicePath \"\"" Sep 30 10:06:42 crc kubenswrapper[4730]: I0930 10:06:42.157301 4730 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/1413b585-d9dd-46fb-ae45-3e4737d13410-var-run\") on node \"crc\" DevicePath \"\"" Sep 30 10:06:42 crc kubenswrapper[4730]: I0930 10:06:42.157708 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1413b585-d9dd-46fb-ae45-3e4737d13410-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "1413b585-d9dd-46fb-ae45-3e4737d13410" (UID: "1413b585-d9dd-46fb-ae45-3e4737d13410"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 10:06:42 crc kubenswrapper[4730]: I0930 10:06:42.157927 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1413b585-d9dd-46fb-ae45-3e4737d13410-scripts" (OuterVolumeSpecName: "scripts") pod "1413b585-d9dd-46fb-ae45-3e4737d13410" (UID: "1413b585-d9dd-46fb-ae45-3e4737d13410"). 
InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 10:06:42 crc kubenswrapper[4730]: I0930 10:06:42.160736 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1413b585-d9dd-46fb-ae45-3e4737d13410-kube-api-access-xqwtc" (OuterVolumeSpecName: "kube-api-access-xqwtc") pod "1413b585-d9dd-46fb-ae45-3e4737d13410" (UID: "1413b585-d9dd-46fb-ae45-3e4737d13410"). InnerVolumeSpecName "kube-api-access-xqwtc". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:06:42 crc kubenswrapper[4730]: I0930 10:06:42.258859 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xqwtc\" (UniqueName: \"kubernetes.io/projected/1413b585-d9dd-46fb-ae45-3e4737d13410-kube-api-access-xqwtc\") on node \"crc\" DevicePath \"\"" Sep 30 10:06:42 crc kubenswrapper[4730]: I0930 10:06:42.258902 4730 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/1413b585-d9dd-46fb-ae45-3e4737d13410-additional-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 10:06:42 crc kubenswrapper[4730]: I0930 10:06:42.258912 4730 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1413b585-d9dd-46fb-ae45-3e4737d13410-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 10:06:42 crc kubenswrapper[4730]: I0930 10:06:42.567896 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-4tlvb-config-wckbn" Sep 30 10:06:42 crc kubenswrapper[4730]: I0930 10:06:42.568651 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-4tlvb-config-wckbn" event={"ID":"1413b585-d9dd-46fb-ae45-3e4737d13410","Type":"ContainerDied","Data":"37ca16afd6e0474e29f9074aa48bbaf993c3479715f266a7e88c5747ab98453e"} Sep 30 10:06:42 crc kubenswrapper[4730]: I0930 10:06:42.568675 4730 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="37ca16afd6e0474e29f9074aa48bbaf993c3479715f266a7e88c5747ab98453e" Sep 30 10:06:43 crc kubenswrapper[4730]: I0930 10:06:43.052319 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-4tlvb-config-wckbn"] Sep 30 10:06:43 crc kubenswrapper[4730]: I0930 10:06:43.058150 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-4tlvb-config-wckbn"] Sep 30 10:06:43 crc kubenswrapper[4730]: I0930 10:06:43.173466 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-sync-lqkp9"] Sep 30 10:06:43 crc kubenswrapper[4730]: E0930 10:06:43.173811 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1de6415b-c8dc-47eb-9ec3-aad2fa6ecc0a" containerName="mariadb-account-create" Sep 30 10:06:43 crc kubenswrapper[4730]: I0930 10:06:43.173829 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="1de6415b-c8dc-47eb-9ec3-aad2fa6ecc0a" containerName="mariadb-account-create" Sep 30 10:06:43 crc kubenswrapper[4730]: E0930 10:06:43.173847 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1413b585-d9dd-46fb-ae45-3e4737d13410" containerName="ovn-config" Sep 30 10:06:43 crc kubenswrapper[4730]: I0930 10:06:43.173854 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="1413b585-d9dd-46fb-ae45-3e4737d13410" containerName="ovn-config" Sep 30 10:06:43 crc kubenswrapper[4730]: I0930 10:06:43.174022 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="1de6415b-c8dc-47eb-9ec3-aad2fa6ecc0a" 
containerName="mariadb-account-create" Sep 30 10:06:43 crc kubenswrapper[4730]: I0930 10:06:43.174036 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="1413b585-d9dd-46fb-ae45-3e4737d13410" containerName="ovn-config" Sep 30 10:06:43 crc kubenswrapper[4730]: I0930 10:06:43.174573 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-lqkp9" Sep 30 10:06:43 crc kubenswrapper[4730]: I0930 10:06:43.181627 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-config-data" Sep 30 10:06:43 crc kubenswrapper[4730]: I0930 10:06:43.181633 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-6c8vd" Sep 30 10:06:43 crc kubenswrapper[4730]: I0930 10:06:43.184955 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-lqkp9"] Sep 30 10:06:43 crc kubenswrapper[4730]: I0930 10:06:43.275572 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0dc68fe6-3a0f-4069-999a-65cf283c50e2-config-data\") pod \"glance-db-sync-lqkp9\" (UID: \"0dc68fe6-3a0f-4069-999a-65cf283c50e2\") " pod="openstack/glance-db-sync-lqkp9" Sep 30 10:06:43 crc kubenswrapper[4730]: I0930 10:06:43.275657 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0dc68fe6-3a0f-4069-999a-65cf283c50e2-combined-ca-bundle\") pod \"glance-db-sync-lqkp9\" (UID: \"0dc68fe6-3a0f-4069-999a-65cf283c50e2\") " pod="openstack/glance-db-sync-lqkp9" Sep 30 10:06:43 crc kubenswrapper[4730]: I0930 10:06:43.275678 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hhx8b\" (UniqueName: \"kubernetes.io/projected/0dc68fe6-3a0f-4069-999a-65cf283c50e2-kube-api-access-hhx8b\") pod \"glance-db-sync-lqkp9\" (UID: \"0dc68fe6-3a0f-4069-999a-65cf283c50e2\") " pod="openstack/glance-db-sync-lqkp9" Sep 30 10:06:43 crc kubenswrapper[4730]: I0930 10:06:43.275702 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/0dc68fe6-3a0f-4069-999a-65cf283c50e2-db-sync-config-data\") pod \"glance-db-sync-lqkp9\" (UID: \"0dc68fe6-3a0f-4069-999a-65cf283c50e2\") " pod="openstack/glance-db-sync-lqkp9" Sep 30 10:06:43 crc kubenswrapper[4730]: I0930 10:06:43.377261 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0dc68fe6-3a0f-4069-999a-65cf283c50e2-config-data\") pod \"glance-db-sync-lqkp9\" (UID: \"0dc68fe6-3a0f-4069-999a-65cf283c50e2\") " pod="openstack/glance-db-sync-lqkp9" Sep 30 10:06:43 crc kubenswrapper[4730]: I0930 10:06:43.377349 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0dc68fe6-3a0f-4069-999a-65cf283c50e2-combined-ca-bundle\") pod \"glance-db-sync-lqkp9\" (UID: \"0dc68fe6-3a0f-4069-999a-65cf283c50e2\") " pod="openstack/glance-db-sync-lqkp9" Sep 30 10:06:43 crc kubenswrapper[4730]: I0930 10:06:43.377379 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hhx8b\" (UniqueName: \"kubernetes.io/projected/0dc68fe6-3a0f-4069-999a-65cf283c50e2-kube-api-access-hhx8b\") pod \"glance-db-sync-lqkp9\" (UID: 
\"0dc68fe6-3a0f-4069-999a-65cf283c50e2\") " pod="openstack/glance-db-sync-lqkp9" Sep 30 10:06:43 crc kubenswrapper[4730]: I0930 10:06:43.377407 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/0dc68fe6-3a0f-4069-999a-65cf283c50e2-db-sync-config-data\") pod \"glance-db-sync-lqkp9\" (UID: \"0dc68fe6-3a0f-4069-999a-65cf283c50e2\") " pod="openstack/glance-db-sync-lqkp9" Sep 30 10:06:43 crc kubenswrapper[4730]: I0930 10:06:43.383358 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0dc68fe6-3a0f-4069-999a-65cf283c50e2-config-data\") pod \"glance-db-sync-lqkp9\" (UID: \"0dc68fe6-3a0f-4069-999a-65cf283c50e2\") " pod="openstack/glance-db-sync-lqkp9" Sep 30 10:06:43 crc kubenswrapper[4730]: I0930 10:06:43.383713 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/0dc68fe6-3a0f-4069-999a-65cf283c50e2-db-sync-config-data\") pod \"glance-db-sync-lqkp9\" (UID: \"0dc68fe6-3a0f-4069-999a-65cf283c50e2\") " pod="openstack/glance-db-sync-lqkp9" Sep 30 10:06:43 crc kubenswrapper[4730]: I0930 10:06:43.386001 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0dc68fe6-3a0f-4069-999a-65cf283c50e2-combined-ca-bundle\") pod \"glance-db-sync-lqkp9\" (UID: \"0dc68fe6-3a0f-4069-999a-65cf283c50e2\") " pod="openstack/glance-db-sync-lqkp9" Sep 30 10:06:43 crc kubenswrapper[4730]: I0930 10:06:43.396431 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hhx8b\" (UniqueName: \"kubernetes.io/projected/0dc68fe6-3a0f-4069-999a-65cf283c50e2-kube-api-access-hhx8b\") pod \"glance-db-sync-lqkp9\" (UID: \"0dc68fe6-3a0f-4069-999a-65cf283c50e2\") " pod="openstack/glance-db-sync-lqkp9" Sep 30 10:06:43 crc kubenswrapper[4730]: I0930 10:06:43.494200 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-lqkp9" Sep 30 10:06:44 crc kubenswrapper[4730]: I0930 10:06:44.208383 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-lqkp9"] Sep 30 10:06:44 crc kubenswrapper[4730]: I0930 10:06:44.391909 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1413b585-d9dd-46fb-ae45-3e4737d13410" path="/var/lib/kubelet/pods/1413b585-d9dd-46fb-ae45-3e4737d13410/volumes" Sep 30 10:06:44 crc kubenswrapper[4730]: I0930 10:06:44.585651 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-lqkp9" event={"ID":"0dc68fe6-3a0f-4069-999a-65cf283c50e2","Type":"ContainerStarted","Data":"ea46a25f35e6154230585b8d8bf84f8a4a9f9e451f4fe1376132eca11a0c9fde"} Sep 30 10:06:48 crc kubenswrapper[4730]: I0930 10:06:48.622324 4730 generic.go:334] "Generic (PLEG): container finished" podID="711be93d-c342-44a5-aac9-ace1d09682a0" containerID="b12066acb5d61484a5a6d994e6136a6fb5d60df22d47239fe02e68f5b58fdfaf" exitCode=0 Sep 30 10:06:48 crc kubenswrapper[4730]: I0930 10:06:48.622415 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"711be93d-c342-44a5-aac9-ace1d09682a0","Type":"ContainerDied","Data":"b12066acb5d61484a5a6d994e6136a6fb5d60df22d47239fe02e68f5b58fdfaf"} Sep 30 10:06:48 crc kubenswrapper[4730]: I0930 10:06:48.820536 4730 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="6cf0ebea-06fc-47b2-a2c6-95605e023f94" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.108:5671: connect: connection refused" Sep 30 10:06:49 crc kubenswrapper[4730]: I0930 10:06:49.079442 4730 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-cell1-server-0" podUID="b1cbad29-68e1-42a9-af8f-ea1cfcb774a2" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.109:5671: connect: connection refused" Sep 30 10:06:49 crc kubenswrapper[4730]: I0930 10:06:49.370746 4730 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-notifications-server-0" podUID="7ba6b518-edfa-4d19-b096-03d7d96c51a3" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.110:5671: connect: connection refused" Sep 30 10:06:57 crc kubenswrapper[4730]: I0930 10:06:57.698032 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-lqkp9" event={"ID":"0dc68fe6-3a0f-4069-999a-65cf283c50e2","Type":"ContainerStarted","Data":"779ecda634a65afefa3d576c756fe0a9d9d227fea94c080c59a5426b95bbb0c4"} Sep 30 10:06:57 crc kubenswrapper[4730]: I0930 10:06:57.702573 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"711be93d-c342-44a5-aac9-ace1d09682a0","Type":"ContainerStarted","Data":"a561ef0dd4d1e16e5f8d236ce3741951e8ea4002afbc19febbe41202eecc609d"} Sep 30 10:06:57 crc kubenswrapper[4730]: I0930 10:06:57.722397 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-sync-lqkp9" podStartSLOduration=2.291822931 podStartE2EDuration="14.722373596s" podCreationTimestamp="2025-09-30 10:06:43 +0000 UTC" firstStartedPulling="2025-09-30 10:06:44.23418122 +0000 UTC m=+1048.567441213" lastFinishedPulling="2025-09-30 10:06:56.664731885 +0000 UTC m=+1060.997991878" observedRunningTime="2025-09-30 10:06:57.714596688 +0000 UTC m=+1062.047856681" watchObservedRunningTime="2025-09-30 10:06:57.722373596 +0000 UTC m=+1062.055633589" Sep 30 10:06:58 
crc kubenswrapper[4730]: I0930 10:06:58.820228 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Sep 30 10:06:59 crc kubenswrapper[4730]: I0930 10:06:59.078898 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Sep 30 10:06:59 crc kubenswrapper[4730]: I0930 10:06:59.234786 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-create-9ghfp"] Sep 30 10:06:59 crc kubenswrapper[4730]: I0930 10:06:59.236070 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-9ghfp" Sep 30 10:06:59 crc kubenswrapper[4730]: I0930 10:06:59.247746 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-9ghfp"] Sep 30 10:06:59 crc kubenswrapper[4730]: I0930 10:06:59.328492 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-g2h6p"] Sep 30 10:06:59 crc kubenswrapper[4730]: I0930 10:06:59.331808 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-g2h6p" Sep 30 10:06:59 crc kubenswrapper[4730]: I0930 10:06:59.338169 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-g2h6p"] Sep 30 10:06:59 crc kubenswrapper[4730]: I0930 10:06:59.354363 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xfkf4\" (UniqueName: \"kubernetes.io/projected/8338a732-bf5e-4ce3-bf9b-2c00d8bb11e4-kube-api-access-xfkf4\") pod \"cinder-db-create-9ghfp\" (UID: \"8338a732-bf5e-4ce3-bf9b-2c00d8bb11e4\") " pod="openstack/cinder-db-create-9ghfp" Sep 30 10:06:59 crc kubenswrapper[4730]: I0930 10:06:59.371811 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-notifications-server-0" Sep 30 10:06:59 crc kubenswrapper[4730]: I0930 10:06:59.455997 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zgnzz\" (UniqueName: \"kubernetes.io/projected/35caf426-5c87-4f91-ad73-00a113363a23-kube-api-access-zgnzz\") pod \"barbican-db-create-g2h6p\" (UID: \"35caf426-5c87-4f91-ad73-00a113363a23\") " pod="openstack/barbican-db-create-g2h6p" Sep 30 10:06:59 crc kubenswrapper[4730]: I0930 10:06:59.456289 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xfkf4\" (UniqueName: \"kubernetes.io/projected/8338a732-bf5e-4ce3-bf9b-2c00d8bb11e4-kube-api-access-xfkf4\") pod \"cinder-db-create-9ghfp\" (UID: \"8338a732-bf5e-4ce3-bf9b-2c00d8bb11e4\") " pod="openstack/cinder-db-create-9ghfp" Sep 30 10:06:59 crc kubenswrapper[4730]: I0930 10:06:59.503689 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xfkf4\" (UniqueName: \"kubernetes.io/projected/8338a732-bf5e-4ce3-bf9b-2c00d8bb11e4-kube-api-access-xfkf4\") pod \"cinder-db-create-9ghfp\" (UID: \"8338a732-bf5e-4ce3-bf9b-2c00d8bb11e4\") " pod="openstack/cinder-db-create-9ghfp" Sep 30 10:06:59 crc kubenswrapper[4730]: I0930 10:06:59.507568 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-b9drt"] Sep 30 10:06:59 crc kubenswrapper[4730]: I0930 10:06:59.509140 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-b9drt" Sep 30 10:06:59 crc kubenswrapper[4730]: I0930 10:06:59.514063 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Sep 30 10:06:59 crc kubenswrapper[4730]: I0930 10:06:59.514076 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Sep 30 10:06:59 crc kubenswrapper[4730]: I0930 10:06:59.514841 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-wg7cm" Sep 30 10:06:59 crc kubenswrapper[4730]: I0930 10:06:59.514992 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Sep 30 10:06:59 crc kubenswrapper[4730]: I0930 10:06:59.529125 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-b9drt"] Sep 30 10:06:59 crc kubenswrapper[4730]: I0930 10:06:59.552360 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-9ghfp" Sep 30 10:06:59 crc kubenswrapper[4730]: I0930 10:06:59.558497 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zgnzz\" (UniqueName: \"kubernetes.io/projected/35caf426-5c87-4f91-ad73-00a113363a23-kube-api-access-zgnzz\") pod \"barbican-db-create-g2h6p\" (UID: \"35caf426-5c87-4f91-ad73-00a113363a23\") " pod="openstack/barbican-db-create-g2h6p" Sep 30 10:06:59 crc kubenswrapper[4730]: I0930 10:06:59.578572 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zgnzz\" (UniqueName: \"kubernetes.io/projected/35caf426-5c87-4f91-ad73-00a113363a23-kube-api-access-zgnzz\") pod \"barbican-db-create-g2h6p\" (UID: \"35caf426-5c87-4f91-ad73-00a113363a23\") " pod="openstack/barbican-db-create-g2h6p" Sep 30 10:06:59 crc kubenswrapper[4730]: I0930 10:06:59.635013 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-create-zrxjm"] Sep 30 10:06:59 crc kubenswrapper[4730]: I0930 10:06:59.644041 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-zrxjm" Sep 30 10:06:59 crc kubenswrapper[4730]: I0930 10:06:59.655216 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-g2h6p" Sep 30 10:06:59 crc kubenswrapper[4730]: I0930 10:06:59.659985 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-zrxjm"] Sep 30 10:06:59 crc kubenswrapper[4730]: I0930 10:06:59.660820 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/914446c5-8bb0-4025-aa12-1ddd46cda6d0-combined-ca-bundle\") pod \"keystone-db-sync-b9drt\" (UID: \"914446c5-8bb0-4025-aa12-1ddd46cda6d0\") " pod="openstack/keystone-db-sync-b9drt" Sep 30 10:06:59 crc kubenswrapper[4730]: I0930 10:06:59.660938 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/914446c5-8bb0-4025-aa12-1ddd46cda6d0-config-data\") pod \"keystone-db-sync-b9drt\" (UID: \"914446c5-8bb0-4025-aa12-1ddd46cda6d0\") " pod="openstack/keystone-db-sync-b9drt" Sep 30 10:06:59 crc kubenswrapper[4730]: I0930 10:06:59.660969 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v9zgn\" (UniqueName: \"kubernetes.io/projected/914446c5-8bb0-4025-aa12-1ddd46cda6d0-kube-api-access-v9zgn\") pod \"keystone-db-sync-b9drt\" (UID: \"914446c5-8bb0-4025-aa12-1ddd46cda6d0\") " pod="openstack/keystone-db-sync-b9drt" Sep 30 10:06:59 crc kubenswrapper[4730]: I0930 10:06:59.729523 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"711be93d-c342-44a5-aac9-ace1d09682a0","Type":"ContainerStarted","Data":"efae9502c6e2b8bf6458b3ffeccb24c8a6b267c7cff07ffe3c4a6e19b8b4bc04"} Sep 30 10:06:59 crc kubenswrapper[4730]: I0930 10:06:59.764900 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/914446c5-8bb0-4025-aa12-1ddd46cda6d0-combined-ca-bundle\") pod \"keystone-db-sync-b9drt\" (UID: \"914446c5-8bb0-4025-aa12-1ddd46cda6d0\") " pod="openstack/keystone-db-sync-b9drt" Sep 30 10:06:59 crc kubenswrapper[4730]: I0930 10:06:59.765001 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7b652\" (UniqueName: \"kubernetes.io/projected/9e670747-fb83-4ee4-a83f-c6d2d06f213e-kube-api-access-7b652\") pod \"neutron-db-create-zrxjm\" (UID: \"9e670747-fb83-4ee4-a83f-c6d2d06f213e\") " pod="openstack/neutron-db-create-zrxjm" Sep 30 10:06:59 crc kubenswrapper[4730]: I0930 10:06:59.765125 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/914446c5-8bb0-4025-aa12-1ddd46cda6d0-config-data\") pod \"keystone-db-sync-b9drt\" (UID: \"914446c5-8bb0-4025-aa12-1ddd46cda6d0\") " pod="openstack/keystone-db-sync-b9drt" Sep 30 10:06:59 crc kubenswrapper[4730]: I0930 10:06:59.765157 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v9zgn\" (UniqueName: \"kubernetes.io/projected/914446c5-8bb0-4025-aa12-1ddd46cda6d0-kube-api-access-v9zgn\") pod \"keystone-db-sync-b9drt\" (UID: \"914446c5-8bb0-4025-aa12-1ddd46cda6d0\") " pod="openstack/keystone-db-sync-b9drt" Sep 30 10:06:59 crc kubenswrapper[4730]: I0930 10:06:59.770464 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/914446c5-8bb0-4025-aa12-1ddd46cda6d0-combined-ca-bundle\") pod 
\"keystone-db-sync-b9drt\" (UID: \"914446c5-8bb0-4025-aa12-1ddd46cda6d0\") " pod="openstack/keystone-db-sync-b9drt" Sep 30 10:06:59 crc kubenswrapper[4730]: I0930 10:06:59.773203 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/914446c5-8bb0-4025-aa12-1ddd46cda6d0-config-data\") pod \"keystone-db-sync-b9drt\" (UID: \"914446c5-8bb0-4025-aa12-1ddd46cda6d0\") " pod="openstack/keystone-db-sync-b9drt" Sep 30 10:06:59 crc kubenswrapper[4730]: I0930 10:06:59.785296 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v9zgn\" (UniqueName: \"kubernetes.io/projected/914446c5-8bb0-4025-aa12-1ddd46cda6d0-kube-api-access-v9zgn\") pod \"keystone-db-sync-b9drt\" (UID: \"914446c5-8bb0-4025-aa12-1ddd46cda6d0\") " pod="openstack/keystone-db-sync-b9drt" Sep 30 10:06:59 crc kubenswrapper[4730]: I0930 10:06:59.843625 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-b9drt" Sep 30 10:06:59 crc kubenswrapper[4730]: I0930 10:06:59.866390 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7b652\" (UniqueName: \"kubernetes.io/projected/9e670747-fb83-4ee4-a83f-c6d2d06f213e-kube-api-access-7b652\") pod \"neutron-db-create-zrxjm\" (UID: \"9e670747-fb83-4ee4-a83f-c6d2d06f213e\") " pod="openstack/neutron-db-create-zrxjm" Sep 30 10:06:59 crc kubenswrapper[4730]: I0930 10:06:59.891263 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7b652\" (UniqueName: \"kubernetes.io/projected/9e670747-fb83-4ee4-a83f-c6d2d06f213e-kube-api-access-7b652\") pod \"neutron-db-create-zrxjm\" (UID: \"9e670747-fb83-4ee4-a83f-c6d2d06f213e\") " pod="openstack/neutron-db-create-zrxjm" Sep 30 10:06:59 crc kubenswrapper[4730]: I0930 10:06:59.985196 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-zrxjm" Sep 30 10:07:00 crc kubenswrapper[4730]: I0930 10:07:00.175504 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-9ghfp"] Sep 30 10:07:00 crc kubenswrapper[4730]: I0930 10:07:00.307431 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-g2h6p"] Sep 30 10:07:00 crc kubenswrapper[4730]: W0930 10:07:00.337997 4730 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod35caf426_5c87_4f91_ad73_00a113363a23.slice/crio-bcca44ec18f28148122af2bdba1fd2fe9cfe6b045669c1044a731f3cf64bbfda WatchSource:0}: Error finding container bcca44ec18f28148122af2bdba1fd2fe9cfe6b045669c1044a731f3cf64bbfda: Status 404 returned error can't find the container with id bcca44ec18f28148122af2bdba1fd2fe9cfe6b045669c1044a731f3cf64bbfda Sep 30 10:07:00 crc kubenswrapper[4730]: I0930 10:07:00.584380 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-b9drt"] Sep 30 10:07:00 crc kubenswrapper[4730]: I0930 10:07:00.697577 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-zrxjm"] Sep 30 10:07:00 crc kubenswrapper[4730]: I0930 10:07:00.775058 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"711be93d-c342-44a5-aac9-ace1d09682a0","Type":"ContainerStarted","Data":"af16d88b26372b7af8fd00163d08b8dcabdf539a068578c7c5ab22e51ec68665"} Sep 30 10:07:00 crc kubenswrapper[4730]: I0930 10:07:00.782060 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-b9drt" event={"ID":"914446c5-8bb0-4025-aa12-1ddd46cda6d0","Type":"ContainerStarted","Data":"1d5af00ab733c3f75a6c37ec65ee767977c0823fa53d8a71221d688c77716c4a"} Sep 30 10:07:00 crc kubenswrapper[4730]: I0930 10:07:00.783825 4730 generic.go:334] "Generic (PLEG): container finished" podID="8338a732-bf5e-4ce3-bf9b-2c00d8bb11e4" containerID="5ab2c7a83e471040613b45bb037b3115907eef1d25dbb0a4a1e57cda4db60eb6" exitCode=0 Sep 30 10:07:00 crc kubenswrapper[4730]: I0930 10:07:00.783885 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-9ghfp" event={"ID":"8338a732-bf5e-4ce3-bf9b-2c00d8bb11e4","Type":"ContainerDied","Data":"5ab2c7a83e471040613b45bb037b3115907eef1d25dbb0a4a1e57cda4db60eb6"} Sep 30 10:07:00 crc kubenswrapper[4730]: I0930 10:07:00.783907 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-9ghfp" event={"ID":"8338a732-bf5e-4ce3-bf9b-2c00d8bb11e4","Type":"ContainerStarted","Data":"6cbdd6dc4328c5b0fc570cbe5611cc7ef8a38a07fdf385a5cecf41253aacde2f"} Sep 30 10:07:00 crc kubenswrapper[4730]: I0930 10:07:00.785645 4730 generic.go:334] "Generic (PLEG): container finished" podID="35caf426-5c87-4f91-ad73-00a113363a23" containerID="ad67f1ae198f1a9e8882d912eaf493f81ccdd573952c684bd5c5e5400478e833" exitCode=0 Sep 30 10:07:00 crc kubenswrapper[4730]: I0930 10:07:00.785681 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-g2h6p" event={"ID":"35caf426-5c87-4f91-ad73-00a113363a23","Type":"ContainerDied","Data":"ad67f1ae198f1a9e8882d912eaf493f81ccdd573952c684bd5c5e5400478e833"} Sep 30 10:07:00 crc kubenswrapper[4730]: I0930 10:07:00.785703 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-g2h6p" 
event={"ID":"35caf426-5c87-4f91-ad73-00a113363a23","Type":"ContainerStarted","Data":"bcca44ec18f28148122af2bdba1fd2fe9cfe6b045669c1044a731f3cf64bbfda"} Sep 30 10:07:00 crc kubenswrapper[4730]: I0930 10:07:00.849942 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/prometheus-metric-storage-0" podStartSLOduration=23.840599859 podStartE2EDuration="23.840599859s" podCreationTimestamp="2025-09-30 10:06:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 10:07:00.815170342 +0000 UTC m=+1065.148430355" watchObservedRunningTime="2025-09-30 10:07:00.840599859 +0000 UTC m=+1065.173859842" Sep 30 10:07:01 crc kubenswrapper[4730]: I0930 10:07:01.800245 4730 generic.go:334] "Generic (PLEG): container finished" podID="9e670747-fb83-4ee4-a83f-c6d2d06f213e" containerID="eb786361a6739649eda2a6096de792a7ad71d8c4af630060dbe747389c811438" exitCode=0 Sep 30 10:07:01 crc kubenswrapper[4730]: I0930 10:07:01.800399 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-zrxjm" event={"ID":"9e670747-fb83-4ee4-a83f-c6d2d06f213e","Type":"ContainerDied","Data":"eb786361a6739649eda2a6096de792a7ad71d8c4af630060dbe747389c811438"} Sep 30 10:07:01 crc kubenswrapper[4730]: I0930 10:07:01.800805 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-zrxjm" event={"ID":"9e670747-fb83-4ee4-a83f-c6d2d06f213e","Type":"ContainerStarted","Data":"d342014628dd628c6a767b40d3a5f510c6b936194b308d4a557236e5ea1bec63"} Sep 30 10:07:02 crc kubenswrapper[4730]: I0930 10:07:02.308755 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-g2h6p" Sep 30 10:07:02 crc kubenswrapper[4730]: I0930 10:07:02.313000 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/watcher-db-sync-47ddk"] Sep 30 10:07:02 crc kubenswrapper[4730]: E0930 10:07:02.313624 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="35caf426-5c87-4f91-ad73-00a113363a23" containerName="mariadb-database-create" Sep 30 10:07:02 crc kubenswrapper[4730]: I0930 10:07:02.313650 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="35caf426-5c87-4f91-ad73-00a113363a23" containerName="mariadb-database-create" Sep 30 10:07:02 crc kubenswrapper[4730]: I0930 10:07:02.313831 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="35caf426-5c87-4f91-ad73-00a113363a23" containerName="mariadb-database-create" Sep 30 10:07:02 crc kubenswrapper[4730]: I0930 10:07:02.314426 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/watcher-db-sync-47ddk" Sep 30 10:07:02 crc kubenswrapper[4730]: I0930 10:07:02.317029 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"watcher-config-data" Sep 30 10:07:02 crc kubenswrapper[4730]: I0930 10:07:02.317201 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"watcher-watcher-dockercfg-4wgss" Sep 30 10:07:02 crc kubenswrapper[4730]: I0930 10:07:02.325937 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-db-sync-47ddk"] Sep 30 10:07:02 crc kubenswrapper[4730]: I0930 10:07:02.340162 4730 patch_prober.go:28] interesting pod/machine-config-daemon-d4zf9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 10:07:02 crc kubenswrapper[4730]: I0930 10:07:02.340352 4730 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 10:07:02 crc kubenswrapper[4730]: I0930 10:07:02.439658 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-9ghfp" Sep 30 10:07:02 crc kubenswrapper[4730]: I0930 10:07:02.441089 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgnzz\" (UniqueName: \"kubernetes.io/projected/35caf426-5c87-4f91-ad73-00a113363a23-kube-api-access-zgnzz\") pod \"35caf426-5c87-4f91-ad73-00a113363a23\" (UID: \"35caf426-5c87-4f91-ad73-00a113363a23\") " Sep 30 10:07:02 crc kubenswrapper[4730]: I0930 10:07:02.441350 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-45tk8\" (UniqueName: \"kubernetes.io/projected/f947945d-0a37-4104-95b5-d3437cd60556-kube-api-access-45tk8\") pod \"watcher-db-sync-47ddk\" (UID: \"f947945d-0a37-4104-95b5-d3437cd60556\") " pod="openstack/watcher-db-sync-47ddk" Sep 30 10:07:02 crc kubenswrapper[4730]: I0930 10:07:02.441392 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/f947945d-0a37-4104-95b5-d3437cd60556-db-sync-config-data\") pod \"watcher-db-sync-47ddk\" (UID: \"f947945d-0a37-4104-95b5-d3437cd60556\") " pod="openstack/watcher-db-sync-47ddk" Sep 30 10:07:02 crc kubenswrapper[4730]: I0930 10:07:02.441532 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f947945d-0a37-4104-95b5-d3437cd60556-config-data\") pod \"watcher-db-sync-47ddk\" (UID: \"f947945d-0a37-4104-95b5-d3437cd60556\") " pod="openstack/watcher-db-sync-47ddk" Sep 30 10:07:02 crc kubenswrapper[4730]: I0930 10:07:02.441593 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f947945d-0a37-4104-95b5-d3437cd60556-combined-ca-bundle\") pod \"watcher-db-sync-47ddk\" (UID: \"f947945d-0a37-4104-95b5-d3437cd60556\") " pod="openstack/watcher-db-sync-47ddk" Sep 30 10:07:02 crc kubenswrapper[4730]: I0930 10:07:02.449301 4730 operation_generator.go:803] 
UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/35caf426-5c87-4f91-ad73-00a113363a23-kube-api-access-zgnzz" (OuterVolumeSpecName: "kube-api-access-zgnzz") pod "35caf426-5c87-4f91-ad73-00a113363a23" (UID: "35caf426-5c87-4f91-ad73-00a113363a23"). InnerVolumeSpecName "kube-api-access-zgnzz". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:07:02 crc kubenswrapper[4730]: I0930 10:07:02.542840 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xfkf4\" (UniqueName: \"kubernetes.io/projected/8338a732-bf5e-4ce3-bf9b-2c00d8bb11e4-kube-api-access-xfkf4\") pod \"8338a732-bf5e-4ce3-bf9b-2c00d8bb11e4\" (UID: \"8338a732-bf5e-4ce3-bf9b-2c00d8bb11e4\") " Sep 30 10:07:02 crc kubenswrapper[4730]: I0930 10:07:02.543297 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f947945d-0a37-4104-95b5-d3437cd60556-combined-ca-bundle\") pod \"watcher-db-sync-47ddk\" (UID: \"f947945d-0a37-4104-95b5-d3437cd60556\") " pod="openstack/watcher-db-sync-47ddk" Sep 30 10:07:02 crc kubenswrapper[4730]: I0930 10:07:02.543331 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-45tk8\" (UniqueName: \"kubernetes.io/projected/f947945d-0a37-4104-95b5-d3437cd60556-kube-api-access-45tk8\") pod \"watcher-db-sync-47ddk\" (UID: \"f947945d-0a37-4104-95b5-d3437cd60556\") " pod="openstack/watcher-db-sync-47ddk" Sep 30 10:07:02 crc kubenswrapper[4730]: I0930 10:07:02.543811 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/f947945d-0a37-4104-95b5-d3437cd60556-db-sync-config-data\") pod \"watcher-db-sync-47ddk\" (UID: \"f947945d-0a37-4104-95b5-d3437cd60556\") " pod="openstack/watcher-db-sync-47ddk" Sep 30 10:07:02 crc kubenswrapper[4730]: I0930 10:07:02.543924 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f947945d-0a37-4104-95b5-d3437cd60556-config-data\") pod \"watcher-db-sync-47ddk\" (UID: \"f947945d-0a37-4104-95b5-d3437cd60556\") " pod="openstack/watcher-db-sync-47ddk" Sep 30 10:07:02 crc kubenswrapper[4730]: I0930 10:07:02.544043 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgnzz\" (UniqueName: \"kubernetes.io/projected/35caf426-5c87-4f91-ad73-00a113363a23-kube-api-access-zgnzz\") on node \"crc\" DevicePath \"\"" Sep 30 10:07:02 crc kubenswrapper[4730]: I0930 10:07:02.547346 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8338a732-bf5e-4ce3-bf9b-2c00d8bb11e4-kube-api-access-xfkf4" (OuterVolumeSpecName: "kube-api-access-xfkf4") pod "8338a732-bf5e-4ce3-bf9b-2c00d8bb11e4" (UID: "8338a732-bf5e-4ce3-bf9b-2c00d8bb11e4"). InnerVolumeSpecName "kube-api-access-xfkf4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:07:02 crc kubenswrapper[4730]: I0930 10:07:02.547949 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f947945d-0a37-4104-95b5-d3437cd60556-combined-ca-bundle\") pod \"watcher-db-sync-47ddk\" (UID: \"f947945d-0a37-4104-95b5-d3437cd60556\") " pod="openstack/watcher-db-sync-47ddk" Sep 30 10:07:02 crc kubenswrapper[4730]: I0930 10:07:02.550289 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f947945d-0a37-4104-95b5-d3437cd60556-config-data\") pod \"watcher-db-sync-47ddk\" (UID: \"f947945d-0a37-4104-95b5-d3437cd60556\") " pod="openstack/watcher-db-sync-47ddk" Sep 30 10:07:02 crc kubenswrapper[4730]: I0930 10:07:02.553714 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/f947945d-0a37-4104-95b5-d3437cd60556-db-sync-config-data\") pod \"watcher-db-sync-47ddk\" (UID: \"f947945d-0a37-4104-95b5-d3437cd60556\") " pod="openstack/watcher-db-sync-47ddk" Sep 30 10:07:02 crc kubenswrapper[4730]: I0930 10:07:02.560751 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-45tk8\" (UniqueName: \"kubernetes.io/projected/f947945d-0a37-4104-95b5-d3437cd60556-kube-api-access-45tk8\") pod \"watcher-db-sync-47ddk\" (UID: \"f947945d-0a37-4104-95b5-d3437cd60556\") " pod="openstack/watcher-db-sync-47ddk" Sep 30 10:07:02 crc kubenswrapper[4730]: I0930 10:07:02.645935 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xfkf4\" (UniqueName: \"kubernetes.io/projected/8338a732-bf5e-4ce3-bf9b-2c00d8bb11e4-kube-api-access-xfkf4\") on node \"crc\" DevicePath \"\"" Sep 30 10:07:02 crc kubenswrapper[4730]: I0930 10:07:02.741480 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-db-sync-47ddk" Sep 30 10:07:02 crc kubenswrapper[4730]: I0930 10:07:02.817292 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-g2h6p" Sep 30 10:07:02 crc kubenswrapper[4730]: I0930 10:07:02.817284 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-g2h6p" event={"ID":"35caf426-5c87-4f91-ad73-00a113363a23","Type":"ContainerDied","Data":"bcca44ec18f28148122af2bdba1fd2fe9cfe6b045669c1044a731f3cf64bbfda"} Sep 30 10:07:02 crc kubenswrapper[4730]: I0930 10:07:02.817850 4730 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bcca44ec18f28148122af2bdba1fd2fe9cfe6b045669c1044a731f3cf64bbfda" Sep 30 10:07:02 crc kubenswrapper[4730]: I0930 10:07:02.822136 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-create-9ghfp" Sep 30 10:07:02 crc kubenswrapper[4730]: I0930 10:07:02.823764 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-9ghfp" event={"ID":"8338a732-bf5e-4ce3-bf9b-2c00d8bb11e4","Type":"ContainerDied","Data":"6cbdd6dc4328c5b0fc570cbe5611cc7ef8a38a07fdf385a5cecf41253aacde2f"} Sep 30 10:07:02 crc kubenswrapper[4730]: I0930 10:07:02.823829 4730 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6cbdd6dc4328c5b0fc570cbe5611cc7ef8a38a07fdf385a5cecf41253aacde2f" Sep 30 10:07:02 crc kubenswrapper[4730]: I0930 10:07:02.945693 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/prometheus-metric-storage-0" Sep 30 10:07:05 crc kubenswrapper[4730]: I0930 10:07:05.602967 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-zrxjm" Sep 30 10:07:05 crc kubenswrapper[4730]: I0930 10:07:05.698141 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7b652\" (UniqueName: \"kubernetes.io/projected/9e670747-fb83-4ee4-a83f-c6d2d06f213e-kube-api-access-7b652\") pod \"9e670747-fb83-4ee4-a83f-c6d2d06f213e\" (UID: \"9e670747-fb83-4ee4-a83f-c6d2d06f213e\") " Sep 30 10:07:05 crc kubenswrapper[4730]: I0930 10:07:05.703298 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9e670747-fb83-4ee4-a83f-c6d2d06f213e-kube-api-access-7b652" (OuterVolumeSpecName: "kube-api-access-7b652") pod "9e670747-fb83-4ee4-a83f-c6d2d06f213e" (UID: "9e670747-fb83-4ee4-a83f-c6d2d06f213e"). InnerVolumeSpecName "kube-api-access-7b652". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:07:05 crc kubenswrapper[4730]: I0930 10:07:05.799829 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7b652\" (UniqueName: \"kubernetes.io/projected/9e670747-fb83-4ee4-a83f-c6d2d06f213e-kube-api-access-7b652\") on node \"crc\" DevicePath \"\"" Sep 30 10:07:05 crc kubenswrapper[4730]: I0930 10:07:05.850704 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-zrxjm" Sep 30 10:07:05 crc kubenswrapper[4730]: I0930 10:07:05.850702 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-zrxjm" event={"ID":"9e670747-fb83-4ee4-a83f-c6d2d06f213e","Type":"ContainerDied","Data":"d342014628dd628c6a767b40d3a5f510c6b936194b308d4a557236e5ea1bec63"} Sep 30 10:07:05 crc kubenswrapper[4730]: I0930 10:07:05.850847 4730 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d342014628dd628c6a767b40d3a5f510c6b936194b308d4a557236e5ea1bec63" Sep 30 10:07:05 crc kubenswrapper[4730]: I0930 10:07:05.852389 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-b9drt" event={"ID":"914446c5-8bb0-4025-aa12-1ddd46cda6d0","Type":"ContainerStarted","Data":"443a624f63f51b54a1283255625c41610ae2a8fd98889be3b262b5eb0d5d3157"} Sep 30 10:07:05 crc kubenswrapper[4730]: I0930 10:07:05.856582 4730 generic.go:334] "Generic (PLEG): container finished" podID="0dc68fe6-3a0f-4069-999a-65cf283c50e2" containerID="779ecda634a65afefa3d576c756fe0a9d9d227fea94c080c59a5426b95bbb0c4" exitCode=0 Sep 30 10:07:05 crc kubenswrapper[4730]: I0930 10:07:05.856641 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-lqkp9" event={"ID":"0dc68fe6-3a0f-4069-999a-65cf283c50e2","Type":"ContainerDied","Data":"779ecda634a65afefa3d576c756fe0a9d9d227fea94c080c59a5426b95bbb0c4"} Sep 30 10:07:05 crc kubenswrapper[4730]: I0930 10:07:05.876550 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-b9drt" podStartSLOduration=1.875144108 podStartE2EDuration="6.8765271s" podCreationTimestamp="2025-09-30 10:06:59 +0000 UTC" firstStartedPulling="2025-09-30 10:07:00.602209726 +0000 UTC m=+1064.935469719" lastFinishedPulling="2025-09-30 10:07:05.603592718 +0000 UTC m=+1069.936852711" observedRunningTime="2025-09-30 10:07:05.867156671 +0000 UTC m=+1070.200416664" watchObservedRunningTime="2025-09-30 10:07:05.8765271 +0000 UTC m=+1070.209787093" Sep 30 10:07:06 crc kubenswrapper[4730]: I0930 10:07:06.006285 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-db-sync-47ddk"] Sep 30 10:07:06 crc kubenswrapper[4730]: W0930 10:07:06.007408 4730 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf947945d_0a37_4104_95b5_d3437cd60556.slice/crio-2f4bfa728642e4fccd50308e0416ce0ac8d2ac0bccb31204d47040ac119502d6 WatchSource:0}: Error finding container 2f4bfa728642e4fccd50308e0416ce0ac8d2ac0bccb31204d47040ac119502d6: Status 404 returned error can't find the container with id 2f4bfa728642e4fccd50308e0416ce0ac8d2ac0bccb31204d47040ac119502d6 Sep 30 10:07:06 crc kubenswrapper[4730]: I0930 10:07:06.874168 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-db-sync-47ddk" event={"ID":"f947945d-0a37-4104-95b5-d3437cd60556","Type":"ContainerStarted","Data":"2f4bfa728642e4fccd50308e0416ce0ac8d2ac0bccb31204d47040ac119502d6"} Sep 30 10:07:07 crc kubenswrapper[4730]: I0930 10:07:07.349431 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-lqkp9" Sep 30 10:07:07 crc kubenswrapper[4730]: I0930 10:07:07.432224 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hhx8b\" (UniqueName: \"kubernetes.io/projected/0dc68fe6-3a0f-4069-999a-65cf283c50e2-kube-api-access-hhx8b\") pod \"0dc68fe6-3a0f-4069-999a-65cf283c50e2\" (UID: \"0dc68fe6-3a0f-4069-999a-65cf283c50e2\") " Sep 30 10:07:07 crc kubenswrapper[4730]: I0930 10:07:07.432276 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/0dc68fe6-3a0f-4069-999a-65cf283c50e2-db-sync-config-data\") pod \"0dc68fe6-3a0f-4069-999a-65cf283c50e2\" (UID: \"0dc68fe6-3a0f-4069-999a-65cf283c50e2\") " Sep 30 10:07:07 crc kubenswrapper[4730]: I0930 10:07:07.432346 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0dc68fe6-3a0f-4069-999a-65cf283c50e2-config-data\") pod \"0dc68fe6-3a0f-4069-999a-65cf283c50e2\" (UID: \"0dc68fe6-3a0f-4069-999a-65cf283c50e2\") " Sep 30 10:07:07 crc kubenswrapper[4730]: I0930 10:07:07.432373 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0dc68fe6-3a0f-4069-999a-65cf283c50e2-combined-ca-bundle\") pod \"0dc68fe6-3a0f-4069-999a-65cf283c50e2\" (UID: \"0dc68fe6-3a0f-4069-999a-65cf283c50e2\") " Sep 30 10:07:07 crc kubenswrapper[4730]: I0930 10:07:07.439675 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0dc68fe6-3a0f-4069-999a-65cf283c50e2-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "0dc68fe6-3a0f-4069-999a-65cf283c50e2" (UID: "0dc68fe6-3a0f-4069-999a-65cf283c50e2"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:07:07 crc kubenswrapper[4730]: I0930 10:07:07.455988 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0dc68fe6-3a0f-4069-999a-65cf283c50e2-kube-api-access-hhx8b" (OuterVolumeSpecName: "kube-api-access-hhx8b") pod "0dc68fe6-3a0f-4069-999a-65cf283c50e2" (UID: "0dc68fe6-3a0f-4069-999a-65cf283c50e2"). InnerVolumeSpecName "kube-api-access-hhx8b". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:07:07 crc kubenswrapper[4730]: I0930 10:07:07.457267 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0dc68fe6-3a0f-4069-999a-65cf283c50e2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0dc68fe6-3a0f-4069-999a-65cf283c50e2" (UID: "0dc68fe6-3a0f-4069-999a-65cf283c50e2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:07:07 crc kubenswrapper[4730]: I0930 10:07:07.478498 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0dc68fe6-3a0f-4069-999a-65cf283c50e2-config-data" (OuterVolumeSpecName: "config-data") pod "0dc68fe6-3a0f-4069-999a-65cf283c50e2" (UID: "0dc68fe6-3a0f-4069-999a-65cf283c50e2"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:07:07 crc kubenswrapper[4730]: I0930 10:07:07.534433 4730 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0dc68fe6-3a0f-4069-999a-65cf283c50e2-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 10:07:07 crc kubenswrapper[4730]: I0930 10:07:07.534473 4730 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0dc68fe6-3a0f-4069-999a-65cf283c50e2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 10:07:07 crc kubenswrapper[4730]: I0930 10:07:07.534484 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hhx8b\" (UniqueName: \"kubernetes.io/projected/0dc68fe6-3a0f-4069-999a-65cf283c50e2-kube-api-access-hhx8b\") on node \"crc\" DevicePath \"\"" Sep 30 10:07:07 crc kubenswrapper[4730]: I0930 10:07:07.534492 4730 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/0dc68fe6-3a0f-4069-999a-65cf283c50e2-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 10:07:07 crc kubenswrapper[4730]: I0930 10:07:07.885698 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-lqkp9" event={"ID":"0dc68fe6-3a0f-4069-999a-65cf283c50e2","Type":"ContainerDied","Data":"ea46a25f35e6154230585b8d8bf84f8a4a9f9e451f4fe1376132eca11a0c9fde"} Sep 30 10:07:07 crc kubenswrapper[4730]: I0930 10:07:07.885737 4730 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ea46a25f35e6154230585b8d8bf84f8a4a9f9e451f4fe1376132eca11a0c9fde" Sep 30 10:07:07 crc kubenswrapper[4730]: I0930 10:07:07.885805 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-lqkp9" Sep 30 10:07:07 crc kubenswrapper[4730]: I0930 10:07:07.944963 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/prometheus-metric-storage-0" Sep 30 10:07:07 crc kubenswrapper[4730]: I0930 10:07:07.951804 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/prometheus-metric-storage-0" Sep 30 10:07:08 crc kubenswrapper[4730]: I0930 10:07:08.199288 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6749b57f47-5wl2r"] Sep 30 10:07:08 crc kubenswrapper[4730]: E0930 10:07:08.200101 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9e670747-fb83-4ee4-a83f-c6d2d06f213e" containerName="mariadb-database-create" Sep 30 10:07:08 crc kubenswrapper[4730]: I0930 10:07:08.200252 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="9e670747-fb83-4ee4-a83f-c6d2d06f213e" containerName="mariadb-database-create" Sep 30 10:07:08 crc kubenswrapper[4730]: E0930 10:07:08.200383 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8338a732-bf5e-4ce3-bf9b-2c00d8bb11e4" containerName="mariadb-database-create" Sep 30 10:07:08 crc kubenswrapper[4730]: I0930 10:07:08.200548 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="8338a732-bf5e-4ce3-bf9b-2c00d8bb11e4" containerName="mariadb-database-create" Sep 30 10:07:08 crc kubenswrapper[4730]: E0930 10:07:08.200709 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0dc68fe6-3a0f-4069-999a-65cf283c50e2" containerName="glance-db-sync" Sep 30 10:07:08 crc kubenswrapper[4730]: I0930 10:07:08.200814 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="0dc68fe6-3a0f-4069-999a-65cf283c50e2" containerName="glance-db-sync" Sep 30 10:07:08 crc kubenswrapper[4730]: I0930 10:07:08.201157 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="0dc68fe6-3a0f-4069-999a-65cf283c50e2" containerName="glance-db-sync" Sep 30 10:07:08 crc kubenswrapper[4730]: I0930 10:07:08.201282 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="9e670747-fb83-4ee4-a83f-c6d2d06f213e" containerName="mariadb-database-create" Sep 30 10:07:08 crc kubenswrapper[4730]: I0930 10:07:08.201396 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="8338a732-bf5e-4ce3-bf9b-2c00d8bb11e4" containerName="mariadb-database-create" Sep 30 10:07:08 crc kubenswrapper[4730]: I0930 10:07:08.205958 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6749b57f47-5wl2r" Sep 30 10:07:08 crc kubenswrapper[4730]: I0930 10:07:08.225009 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6749b57f47-5wl2r"] Sep 30 10:07:08 crc kubenswrapper[4730]: I0930 10:07:08.244813 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3b3bf560-1015-4c1d-b4c5-2960599d7a84-ovsdbserver-sb\") pod \"dnsmasq-dns-6749b57f47-5wl2r\" (UID: \"3b3bf560-1015-4c1d-b4c5-2960599d7a84\") " pod="openstack/dnsmasq-dns-6749b57f47-5wl2r" Sep 30 10:07:08 crc kubenswrapper[4730]: I0930 10:07:08.245212 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3b3bf560-1015-4c1d-b4c5-2960599d7a84-config\") pod \"dnsmasq-dns-6749b57f47-5wl2r\" (UID: \"3b3bf560-1015-4c1d-b4c5-2960599d7a84\") " pod="openstack/dnsmasq-dns-6749b57f47-5wl2r" Sep 30 10:07:08 crc kubenswrapper[4730]: I0930 10:07:08.245257 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3b3bf560-1015-4c1d-b4c5-2960599d7a84-ovsdbserver-nb\") pod \"dnsmasq-dns-6749b57f47-5wl2r\" (UID: \"3b3bf560-1015-4c1d-b4c5-2960599d7a84\") " pod="openstack/dnsmasq-dns-6749b57f47-5wl2r" Sep 30 10:07:08 crc kubenswrapper[4730]: I0930 10:07:08.245503 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4rsjs\" (UniqueName: \"kubernetes.io/projected/3b3bf560-1015-4c1d-b4c5-2960599d7a84-kube-api-access-4rsjs\") pod \"dnsmasq-dns-6749b57f47-5wl2r\" (UID: \"3b3bf560-1015-4c1d-b4c5-2960599d7a84\") " pod="openstack/dnsmasq-dns-6749b57f47-5wl2r" Sep 30 10:07:08 crc kubenswrapper[4730]: I0930 10:07:08.245652 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3b3bf560-1015-4c1d-b4c5-2960599d7a84-dns-svc\") pod \"dnsmasq-dns-6749b57f47-5wl2r\" (UID: \"3b3bf560-1015-4c1d-b4c5-2960599d7a84\") " pod="openstack/dnsmasq-dns-6749b57f47-5wl2r" Sep 30 10:07:08 crc kubenswrapper[4730]: I0930 10:07:08.347340 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3b3bf560-1015-4c1d-b4c5-2960599d7a84-dns-svc\") pod \"dnsmasq-dns-6749b57f47-5wl2r\" (UID: \"3b3bf560-1015-4c1d-b4c5-2960599d7a84\") " pod="openstack/dnsmasq-dns-6749b57f47-5wl2r" Sep 30 10:07:08 crc kubenswrapper[4730]: I0930 10:07:08.347471 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3b3bf560-1015-4c1d-b4c5-2960599d7a84-ovsdbserver-sb\") pod \"dnsmasq-dns-6749b57f47-5wl2r\" (UID: \"3b3bf560-1015-4c1d-b4c5-2960599d7a84\") " pod="openstack/dnsmasq-dns-6749b57f47-5wl2r" Sep 30 10:07:08 crc kubenswrapper[4730]: I0930 10:07:08.347505 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3b3bf560-1015-4c1d-b4c5-2960599d7a84-config\") pod \"dnsmasq-dns-6749b57f47-5wl2r\" (UID: \"3b3bf560-1015-4c1d-b4c5-2960599d7a84\") " pod="openstack/dnsmasq-dns-6749b57f47-5wl2r" Sep 30 10:07:08 crc kubenswrapper[4730]: I0930 10:07:08.347536 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3b3bf560-1015-4c1d-b4c5-2960599d7a84-ovsdbserver-nb\") pod \"dnsmasq-dns-6749b57f47-5wl2r\" (UID: \"3b3bf560-1015-4c1d-b4c5-2960599d7a84\") " pod="openstack/dnsmasq-dns-6749b57f47-5wl2r" Sep 30 10:07:08 crc kubenswrapper[4730]: I0930 10:07:08.347587 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4rsjs\" (UniqueName: \"kubernetes.io/projected/3b3bf560-1015-4c1d-b4c5-2960599d7a84-kube-api-access-4rsjs\") pod \"dnsmasq-dns-6749b57f47-5wl2r\" (UID: \"3b3bf560-1015-4c1d-b4c5-2960599d7a84\") " pod="openstack/dnsmasq-dns-6749b57f47-5wl2r" Sep 30 10:07:08 crc kubenswrapper[4730]: I0930 10:07:08.348315 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3b3bf560-1015-4c1d-b4c5-2960599d7a84-dns-svc\") pod \"dnsmasq-dns-6749b57f47-5wl2r\" (UID: \"3b3bf560-1015-4c1d-b4c5-2960599d7a84\") " pod="openstack/dnsmasq-dns-6749b57f47-5wl2r" Sep 30 10:07:08 crc kubenswrapper[4730]: I0930 10:07:08.350346 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3b3bf560-1015-4c1d-b4c5-2960599d7a84-ovsdbserver-sb\") pod \"dnsmasq-dns-6749b57f47-5wl2r\" (UID: \"3b3bf560-1015-4c1d-b4c5-2960599d7a84\") " pod="openstack/dnsmasq-dns-6749b57f47-5wl2r" Sep 30 10:07:08 crc kubenswrapper[4730]: I0930 10:07:08.356312 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3b3bf560-1015-4c1d-b4c5-2960599d7a84-config\") pod \"dnsmasq-dns-6749b57f47-5wl2r\" (UID: \"3b3bf560-1015-4c1d-b4c5-2960599d7a84\") " pod="openstack/dnsmasq-dns-6749b57f47-5wl2r" Sep 30 10:07:08 crc kubenswrapper[4730]: I0930 10:07:08.357324 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3b3bf560-1015-4c1d-b4c5-2960599d7a84-ovsdbserver-nb\") pod \"dnsmasq-dns-6749b57f47-5wl2r\" (UID: \"3b3bf560-1015-4c1d-b4c5-2960599d7a84\") " pod="openstack/dnsmasq-dns-6749b57f47-5wl2r" Sep 30 10:07:08 crc kubenswrapper[4730]: I0930 10:07:08.365801 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4rsjs\" (UniqueName: \"kubernetes.io/projected/3b3bf560-1015-4c1d-b4c5-2960599d7a84-kube-api-access-4rsjs\") pod \"dnsmasq-dns-6749b57f47-5wl2r\" (UID: \"3b3bf560-1015-4c1d-b4c5-2960599d7a84\") " pod="openstack/dnsmasq-dns-6749b57f47-5wl2r" Sep 30 10:07:08 crc kubenswrapper[4730]: I0930 10:07:08.530830 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6749b57f47-5wl2r" Sep 30 10:07:08 crc kubenswrapper[4730]: I0930 10:07:08.898330 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/prometheus-metric-storage-0" Sep 30 10:07:09 crc kubenswrapper[4730]: I0930 10:07:09.264588 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-18fc-account-create-9j7xr"] Sep 30 10:07:09 crc kubenswrapper[4730]: I0930 10:07:09.265789 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-18fc-account-create-9j7xr" Sep 30 10:07:09 crc kubenswrapper[4730]: I0930 10:07:09.267853 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret" Sep 30 10:07:09 crc kubenswrapper[4730]: I0930 10:07:09.283413 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-18fc-account-create-9j7xr"] Sep 30 10:07:09 crc kubenswrapper[4730]: I0930 10:07:09.360865 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-8780-account-create-xzbgl"] Sep 30 10:07:09 crc kubenswrapper[4730]: I0930 10:07:09.363319 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-8780-account-create-xzbgl" Sep 30 10:07:09 crc kubenswrapper[4730]: I0930 10:07:09.365038 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret" Sep 30 10:07:09 crc kubenswrapper[4730]: I0930 10:07:09.366496 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jzn84\" (UniqueName: \"kubernetes.io/projected/4bee9a35-25c0-44b5-8160-a2787eeea901-kube-api-access-jzn84\") pod \"cinder-18fc-account-create-9j7xr\" (UID: \"4bee9a35-25c0-44b5-8160-a2787eeea901\") " pod="openstack/cinder-18fc-account-create-9j7xr" Sep 30 10:07:09 crc kubenswrapper[4730]: I0930 10:07:09.370147 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-8780-account-create-xzbgl"] Sep 30 10:07:09 crc kubenswrapper[4730]: I0930 10:07:09.468536 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jzn84\" (UniqueName: \"kubernetes.io/projected/4bee9a35-25c0-44b5-8160-a2787eeea901-kube-api-access-jzn84\") pod \"cinder-18fc-account-create-9j7xr\" (UID: \"4bee9a35-25c0-44b5-8160-a2787eeea901\") " pod="openstack/cinder-18fc-account-create-9j7xr" Sep 30 10:07:09 crc kubenswrapper[4730]: I0930 10:07:09.468593 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tnxjt\" (UniqueName: \"kubernetes.io/projected/ce75a3c3-26fd-44b7-82c9-fe9edf0285fb-kube-api-access-tnxjt\") pod \"barbican-8780-account-create-xzbgl\" (UID: \"ce75a3c3-26fd-44b7-82c9-fe9edf0285fb\") " pod="openstack/barbican-8780-account-create-xzbgl" Sep 30 10:07:09 crc kubenswrapper[4730]: I0930 10:07:09.491299 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jzn84\" (UniqueName: \"kubernetes.io/projected/4bee9a35-25c0-44b5-8160-a2787eeea901-kube-api-access-jzn84\") pod \"cinder-18fc-account-create-9j7xr\" (UID: \"4bee9a35-25c0-44b5-8160-a2787eeea901\") " pod="openstack/cinder-18fc-account-create-9j7xr" Sep 30 10:07:09 crc kubenswrapper[4730]: I0930 10:07:09.569807 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tnxjt\" (UniqueName: \"kubernetes.io/projected/ce75a3c3-26fd-44b7-82c9-fe9edf0285fb-kube-api-access-tnxjt\") pod \"barbican-8780-account-create-xzbgl\" (UID: \"ce75a3c3-26fd-44b7-82c9-fe9edf0285fb\") " pod="openstack/barbican-8780-account-create-xzbgl" Sep 30 10:07:09 crc kubenswrapper[4730]: I0930 10:07:09.586333 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tnxjt\" (UniqueName: \"kubernetes.io/projected/ce75a3c3-26fd-44b7-82c9-fe9edf0285fb-kube-api-access-tnxjt\") pod \"barbican-8780-account-create-xzbgl\" (UID: \"ce75a3c3-26fd-44b7-82c9-fe9edf0285fb\") " 
pod="openstack/barbican-8780-account-create-xzbgl" Sep 30 10:07:09 crc kubenswrapper[4730]: I0930 10:07:09.589252 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-18fc-account-create-9j7xr" Sep 30 10:07:09 crc kubenswrapper[4730]: I0930 10:07:09.687012 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-8780-account-create-xzbgl" Sep 30 10:07:10 crc kubenswrapper[4730]: I0930 10:07:10.917424 4730 generic.go:334] "Generic (PLEG): container finished" podID="914446c5-8bb0-4025-aa12-1ddd46cda6d0" containerID="443a624f63f51b54a1283255625c41610ae2a8fd98889be3b262b5eb0d5d3157" exitCode=0 Sep 30 10:07:10 crc kubenswrapper[4730]: I0930 10:07:10.917555 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-b9drt" event={"ID":"914446c5-8bb0-4025-aa12-1ddd46cda6d0","Type":"ContainerDied","Data":"443a624f63f51b54a1283255625c41610ae2a8fd98889be3b262b5eb0d5d3157"} Sep 30 10:07:12 crc kubenswrapper[4730]: I0930 10:07:12.630200 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-b9drt" Sep 30 10:07:12 crc kubenswrapper[4730]: I0930 10:07:12.739200 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/914446c5-8bb0-4025-aa12-1ddd46cda6d0-config-data\") pod \"914446c5-8bb0-4025-aa12-1ddd46cda6d0\" (UID: \"914446c5-8bb0-4025-aa12-1ddd46cda6d0\") " Sep 30 10:07:12 crc kubenswrapper[4730]: I0930 10:07:12.739419 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/914446c5-8bb0-4025-aa12-1ddd46cda6d0-combined-ca-bundle\") pod \"914446c5-8bb0-4025-aa12-1ddd46cda6d0\" (UID: \"914446c5-8bb0-4025-aa12-1ddd46cda6d0\") " Sep 30 10:07:12 crc kubenswrapper[4730]: I0930 10:07:12.739464 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v9zgn\" (UniqueName: \"kubernetes.io/projected/914446c5-8bb0-4025-aa12-1ddd46cda6d0-kube-api-access-v9zgn\") pod \"914446c5-8bb0-4025-aa12-1ddd46cda6d0\" (UID: \"914446c5-8bb0-4025-aa12-1ddd46cda6d0\") " Sep 30 10:07:12 crc kubenswrapper[4730]: I0930 10:07:12.747905 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/914446c5-8bb0-4025-aa12-1ddd46cda6d0-kube-api-access-v9zgn" (OuterVolumeSpecName: "kube-api-access-v9zgn") pod "914446c5-8bb0-4025-aa12-1ddd46cda6d0" (UID: "914446c5-8bb0-4025-aa12-1ddd46cda6d0"). InnerVolumeSpecName "kube-api-access-v9zgn". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:07:12 crc kubenswrapper[4730]: I0930 10:07:12.768219 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/914446c5-8bb0-4025-aa12-1ddd46cda6d0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "914446c5-8bb0-4025-aa12-1ddd46cda6d0" (UID: "914446c5-8bb0-4025-aa12-1ddd46cda6d0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:07:12 crc kubenswrapper[4730]: I0930 10:07:12.785744 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/914446c5-8bb0-4025-aa12-1ddd46cda6d0-config-data" (OuterVolumeSpecName: "config-data") pod "914446c5-8bb0-4025-aa12-1ddd46cda6d0" (UID: "914446c5-8bb0-4025-aa12-1ddd46cda6d0"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:07:12 crc kubenswrapper[4730]: I0930 10:07:12.841759 4730 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/914446c5-8bb0-4025-aa12-1ddd46cda6d0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 10:07:12 crc kubenswrapper[4730]: I0930 10:07:12.841797 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v9zgn\" (UniqueName: \"kubernetes.io/projected/914446c5-8bb0-4025-aa12-1ddd46cda6d0-kube-api-access-v9zgn\") on node \"crc\" DevicePath \"\"" Sep 30 10:07:12 crc kubenswrapper[4730]: I0930 10:07:12.841810 4730 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/914446c5-8bb0-4025-aa12-1ddd46cda6d0-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 10:07:12 crc kubenswrapper[4730]: I0930 10:07:12.938032 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-b9drt" event={"ID":"914446c5-8bb0-4025-aa12-1ddd46cda6d0","Type":"ContainerDied","Data":"1d5af00ab733c3f75a6c37ec65ee767977c0823fa53d8a71221d688c77716c4a"} Sep 30 10:07:12 crc kubenswrapper[4730]: I0930 10:07:12.938088 4730 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1d5af00ab733c3f75a6c37ec65ee767977c0823fa53d8a71221d688c77716c4a" Sep 30 10:07:12 crc kubenswrapper[4730]: I0930 10:07:12.938095 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-b9drt" Sep 30 10:07:13 crc kubenswrapper[4730]: I0930 10:07:13.079228 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6749b57f47-5wl2r"] Sep 30 10:07:13 crc kubenswrapper[4730]: I0930 10:07:13.102796 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-lv4gb"] Sep 30 10:07:13 crc kubenswrapper[4730]: E0930 10:07:13.113434 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="914446c5-8bb0-4025-aa12-1ddd46cda6d0" containerName="keystone-db-sync" Sep 30 10:07:13 crc kubenswrapper[4730]: I0930 10:07:13.113722 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="914446c5-8bb0-4025-aa12-1ddd46cda6d0" containerName="keystone-db-sync" Sep 30 10:07:13 crc kubenswrapper[4730]: I0930 10:07:13.114010 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="914446c5-8bb0-4025-aa12-1ddd46cda6d0" containerName="keystone-db-sync" Sep 30 10:07:13 crc kubenswrapper[4730]: I0930 10:07:13.114873 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-lv4gb" Sep 30 10:07:13 crc kubenswrapper[4730]: I0930 10:07:13.120880 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Sep 30 10:07:13 crc kubenswrapper[4730]: I0930 10:07:13.121178 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Sep 30 10:07:13 crc kubenswrapper[4730]: I0930 10:07:13.121286 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-wg7cm" Sep 30 10:07:13 crc kubenswrapper[4730]: I0930 10:07:13.121372 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-lv4gb"] Sep 30 10:07:13 crc kubenswrapper[4730]: I0930 10:07:13.122948 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Sep 30 10:07:13 crc kubenswrapper[4730]: I0930 10:07:13.134643 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-9b5f9766f-2w2fq"] Sep 30 10:07:13 crc kubenswrapper[4730]: I0930 10:07:13.136489 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-9b5f9766f-2w2fq" Sep 30 10:07:13 crc kubenswrapper[4730]: I0930 10:07:13.166386 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-9b5f9766f-2w2fq"] Sep 30 10:07:13 crc kubenswrapper[4730]: I0930 10:07:13.254635 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1b9e2246-a00f-4a7f-8cbb-57a5f11d6605-config-data\") pod \"keystone-bootstrap-lv4gb\" (UID: \"1b9e2246-a00f-4a7f-8cbb-57a5f11d6605\") " pod="openstack/keystone-bootstrap-lv4gb" Sep 30 10:07:13 crc kubenswrapper[4730]: I0930 10:07:13.254691 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-78n6d\" (UniqueName: \"kubernetes.io/projected/7b63b221-3873-4118-a434-cad816a03404-kube-api-access-78n6d\") pod \"dnsmasq-dns-9b5f9766f-2w2fq\" (UID: \"7b63b221-3873-4118-a434-cad816a03404\") " pod="openstack/dnsmasq-dns-9b5f9766f-2w2fq" Sep 30 10:07:13 crc kubenswrapper[4730]: I0930 10:07:13.254726 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7b63b221-3873-4118-a434-cad816a03404-dns-svc\") pod \"dnsmasq-dns-9b5f9766f-2w2fq\" (UID: \"7b63b221-3873-4118-a434-cad816a03404\") " pod="openstack/dnsmasq-dns-9b5f9766f-2w2fq" Sep 30 10:07:13 crc kubenswrapper[4730]: I0930 10:07:13.254746 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1b9e2246-a00f-4a7f-8cbb-57a5f11d6605-scripts\") pod \"keystone-bootstrap-lv4gb\" (UID: \"1b9e2246-a00f-4a7f-8cbb-57a5f11d6605\") " pod="openstack/keystone-bootstrap-lv4gb" Sep 30 10:07:13 crc kubenswrapper[4730]: I0930 10:07:13.254766 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1b9e2246-a00f-4a7f-8cbb-57a5f11d6605-combined-ca-bundle\") pod \"keystone-bootstrap-lv4gb\" (UID: \"1b9e2246-a00f-4a7f-8cbb-57a5f11d6605\") " pod="openstack/keystone-bootstrap-lv4gb" Sep 30 10:07:13 crc kubenswrapper[4730]: I0930 10:07:13.254779 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7b63b221-3873-4118-a434-cad816a03404-ovsdbserver-sb\") pod \"dnsmasq-dns-9b5f9766f-2w2fq\" (UID: \"7b63b221-3873-4118-a434-cad816a03404\") " pod="openstack/dnsmasq-dns-9b5f9766f-2w2fq" Sep 30 10:07:13 crc kubenswrapper[4730]: I0930 10:07:13.254793 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/1b9e2246-a00f-4a7f-8cbb-57a5f11d6605-fernet-keys\") pod \"keystone-bootstrap-lv4gb\" (UID: \"1b9e2246-a00f-4a7f-8cbb-57a5f11d6605\") " pod="openstack/keystone-bootstrap-lv4gb" Sep 30 10:07:13 crc kubenswrapper[4730]: I0930 10:07:13.254810 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7b63b221-3873-4118-a434-cad816a03404-ovsdbserver-nb\") pod \"dnsmasq-dns-9b5f9766f-2w2fq\" (UID: \"7b63b221-3873-4118-a434-cad816a03404\") " pod="openstack/dnsmasq-dns-9b5f9766f-2w2fq" Sep 30 10:07:13 crc kubenswrapper[4730]: I0930 10:07:13.254849 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7b63b221-3873-4118-a434-cad816a03404-config\") pod \"dnsmasq-dns-9b5f9766f-2w2fq\" (UID: \"7b63b221-3873-4118-a434-cad816a03404\") " pod="openstack/dnsmasq-dns-9b5f9766f-2w2fq" Sep 30 10:07:13 crc kubenswrapper[4730]: I0930 10:07:13.254865 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/1b9e2246-a00f-4a7f-8cbb-57a5f11d6605-credential-keys\") pod \"keystone-bootstrap-lv4gb\" (UID: \"1b9e2246-a00f-4a7f-8cbb-57a5f11d6605\") " pod="openstack/keystone-bootstrap-lv4gb" Sep 30 10:07:13 crc kubenswrapper[4730]: I0930 10:07:13.254878 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j7qc7\" (UniqueName: \"kubernetes.io/projected/1b9e2246-a00f-4a7f-8cbb-57a5f11d6605-kube-api-access-j7qc7\") pod \"keystone-bootstrap-lv4gb\" (UID: \"1b9e2246-a00f-4a7f-8cbb-57a5f11d6605\") " pod="openstack/keystone-bootstrap-lv4gb" Sep 30 10:07:13 crc kubenswrapper[4730]: I0930 10:07:13.358449 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1b9e2246-a00f-4a7f-8cbb-57a5f11d6605-config-data\") pod \"keystone-bootstrap-lv4gb\" (UID: \"1b9e2246-a00f-4a7f-8cbb-57a5f11d6605\") " pod="openstack/keystone-bootstrap-lv4gb" Sep 30 10:07:13 crc kubenswrapper[4730]: I0930 10:07:13.358507 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-78n6d\" (UniqueName: \"kubernetes.io/projected/7b63b221-3873-4118-a434-cad816a03404-kube-api-access-78n6d\") pod \"dnsmasq-dns-9b5f9766f-2w2fq\" (UID: \"7b63b221-3873-4118-a434-cad816a03404\") " pod="openstack/dnsmasq-dns-9b5f9766f-2w2fq" Sep 30 10:07:13 crc kubenswrapper[4730]: I0930 10:07:13.358543 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7b63b221-3873-4118-a434-cad816a03404-dns-svc\") pod \"dnsmasq-dns-9b5f9766f-2w2fq\" (UID: \"7b63b221-3873-4118-a434-cad816a03404\") " pod="openstack/dnsmasq-dns-9b5f9766f-2w2fq" Sep 30 10:07:13 crc kubenswrapper[4730]: I0930 10:07:13.358567 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" 
(UniqueName: \"kubernetes.io/secret/1b9e2246-a00f-4a7f-8cbb-57a5f11d6605-scripts\") pod \"keystone-bootstrap-lv4gb\" (UID: \"1b9e2246-a00f-4a7f-8cbb-57a5f11d6605\") " pod="openstack/keystone-bootstrap-lv4gb" Sep 30 10:07:13 crc kubenswrapper[4730]: I0930 10:07:13.358594 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1b9e2246-a00f-4a7f-8cbb-57a5f11d6605-combined-ca-bundle\") pod \"keystone-bootstrap-lv4gb\" (UID: \"1b9e2246-a00f-4a7f-8cbb-57a5f11d6605\") " pod="openstack/keystone-bootstrap-lv4gb" Sep 30 10:07:13 crc kubenswrapper[4730]: I0930 10:07:13.358665 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7b63b221-3873-4118-a434-cad816a03404-ovsdbserver-sb\") pod \"dnsmasq-dns-9b5f9766f-2w2fq\" (UID: \"7b63b221-3873-4118-a434-cad816a03404\") " pod="openstack/dnsmasq-dns-9b5f9766f-2w2fq" Sep 30 10:07:13 crc kubenswrapper[4730]: I0930 10:07:13.358690 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/1b9e2246-a00f-4a7f-8cbb-57a5f11d6605-fernet-keys\") pod \"keystone-bootstrap-lv4gb\" (UID: \"1b9e2246-a00f-4a7f-8cbb-57a5f11d6605\") " pod="openstack/keystone-bootstrap-lv4gb" Sep 30 10:07:13 crc kubenswrapper[4730]: I0930 10:07:13.358712 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7b63b221-3873-4118-a434-cad816a03404-ovsdbserver-nb\") pod \"dnsmasq-dns-9b5f9766f-2w2fq\" (UID: \"7b63b221-3873-4118-a434-cad816a03404\") " pod="openstack/dnsmasq-dns-9b5f9766f-2w2fq" Sep 30 10:07:13 crc kubenswrapper[4730]: I0930 10:07:13.358762 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7b63b221-3873-4118-a434-cad816a03404-config\") pod \"dnsmasq-dns-9b5f9766f-2w2fq\" (UID: \"7b63b221-3873-4118-a434-cad816a03404\") " pod="openstack/dnsmasq-dns-9b5f9766f-2w2fq" Sep 30 10:07:13 crc kubenswrapper[4730]: I0930 10:07:13.358782 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/1b9e2246-a00f-4a7f-8cbb-57a5f11d6605-credential-keys\") pod \"keystone-bootstrap-lv4gb\" (UID: \"1b9e2246-a00f-4a7f-8cbb-57a5f11d6605\") " pod="openstack/keystone-bootstrap-lv4gb" Sep 30 10:07:13 crc kubenswrapper[4730]: I0930 10:07:13.358800 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j7qc7\" (UniqueName: \"kubernetes.io/projected/1b9e2246-a00f-4a7f-8cbb-57a5f11d6605-kube-api-access-j7qc7\") pod \"keystone-bootstrap-lv4gb\" (UID: \"1b9e2246-a00f-4a7f-8cbb-57a5f11d6605\") " pod="openstack/keystone-bootstrap-lv4gb" Sep 30 10:07:13 crc kubenswrapper[4730]: I0930 10:07:13.359697 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7b63b221-3873-4118-a434-cad816a03404-dns-svc\") pod \"dnsmasq-dns-9b5f9766f-2w2fq\" (UID: \"7b63b221-3873-4118-a434-cad816a03404\") " pod="openstack/dnsmasq-dns-9b5f9766f-2w2fq" Sep 30 10:07:13 crc kubenswrapper[4730]: I0930 10:07:13.359776 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7b63b221-3873-4118-a434-cad816a03404-ovsdbserver-nb\") pod \"dnsmasq-dns-9b5f9766f-2w2fq\" (UID: 
\"7b63b221-3873-4118-a434-cad816a03404\") " pod="openstack/dnsmasq-dns-9b5f9766f-2w2fq" Sep 30 10:07:13 crc kubenswrapper[4730]: I0930 10:07:13.361487 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7b63b221-3873-4118-a434-cad816a03404-ovsdbserver-sb\") pod \"dnsmasq-dns-9b5f9766f-2w2fq\" (UID: \"7b63b221-3873-4118-a434-cad816a03404\") " pod="openstack/dnsmasq-dns-9b5f9766f-2w2fq" Sep 30 10:07:13 crc kubenswrapper[4730]: I0930 10:07:13.362090 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7b63b221-3873-4118-a434-cad816a03404-config\") pod \"dnsmasq-dns-9b5f9766f-2w2fq\" (UID: \"7b63b221-3873-4118-a434-cad816a03404\") " pod="openstack/dnsmasq-dns-9b5f9766f-2w2fq" Sep 30 10:07:13 crc kubenswrapper[4730]: I0930 10:07:13.381376 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1b9e2246-a00f-4a7f-8cbb-57a5f11d6605-combined-ca-bundle\") pod \"keystone-bootstrap-lv4gb\" (UID: \"1b9e2246-a00f-4a7f-8cbb-57a5f11d6605\") " pod="openstack/keystone-bootstrap-lv4gb" Sep 30 10:07:13 crc kubenswrapper[4730]: I0930 10:07:13.382976 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/1b9e2246-a00f-4a7f-8cbb-57a5f11d6605-credential-keys\") pod \"keystone-bootstrap-lv4gb\" (UID: \"1b9e2246-a00f-4a7f-8cbb-57a5f11d6605\") " pod="openstack/keystone-bootstrap-lv4gb" Sep 30 10:07:13 crc kubenswrapper[4730]: I0930 10:07:13.385433 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1b9e2246-a00f-4a7f-8cbb-57a5f11d6605-config-data\") pod \"keystone-bootstrap-lv4gb\" (UID: \"1b9e2246-a00f-4a7f-8cbb-57a5f11d6605\") " pod="openstack/keystone-bootstrap-lv4gb" Sep 30 10:07:13 crc kubenswrapper[4730]: I0930 10:07:13.391309 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/1b9e2246-a00f-4a7f-8cbb-57a5f11d6605-fernet-keys\") pod \"keystone-bootstrap-lv4gb\" (UID: \"1b9e2246-a00f-4a7f-8cbb-57a5f11d6605\") " pod="openstack/keystone-bootstrap-lv4gb" Sep 30 10:07:13 crc kubenswrapper[4730]: I0930 10:07:13.391870 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1b9e2246-a00f-4a7f-8cbb-57a5f11d6605-scripts\") pod \"keystone-bootstrap-lv4gb\" (UID: \"1b9e2246-a00f-4a7f-8cbb-57a5f11d6605\") " pod="openstack/keystone-bootstrap-lv4gb" Sep 30 10:07:13 crc kubenswrapper[4730]: I0930 10:07:13.442699 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j7qc7\" (UniqueName: \"kubernetes.io/projected/1b9e2246-a00f-4a7f-8cbb-57a5f11d6605-kube-api-access-j7qc7\") pod \"keystone-bootstrap-lv4gb\" (UID: \"1b9e2246-a00f-4a7f-8cbb-57a5f11d6605\") " pod="openstack/keystone-bootstrap-lv4gb" Sep 30 10:07:13 crc kubenswrapper[4730]: I0930 10:07:13.453348 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Sep 30 10:07:13 crc kubenswrapper[4730]: I0930 10:07:13.456390 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-78n6d\" (UniqueName: \"kubernetes.io/projected/7b63b221-3873-4118-a434-cad816a03404-kube-api-access-78n6d\") pod \"dnsmasq-dns-9b5f9766f-2w2fq\" (UID: \"7b63b221-3873-4118-a434-cad816a03404\") " 
pod="openstack/dnsmasq-dns-9b5f9766f-2w2fq" Sep 30 10:07:13 crc kubenswrapper[4730]: I0930 10:07:13.463400 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 10:07:13 crc kubenswrapper[4730]: I0930 10:07:13.469018 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-9b5f9766f-2w2fq" Sep 30 10:07:13 crc kubenswrapper[4730]: I0930 10:07:13.496937 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-sync-gcmkp"] Sep 30 10:07:13 crc kubenswrapper[4730]: I0930 10:07:13.497389 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Sep 30 10:07:13 crc kubenswrapper[4730]: I0930 10:07:13.497550 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Sep 30 10:07:13 crc kubenswrapper[4730]: I0930 10:07:13.498367 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-gcmkp" Sep 30 10:07:13 crc kubenswrapper[4730]: I0930 10:07:13.503557 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Sep 30 10:07:13 crc kubenswrapper[4730]: I0930 10:07:13.503860 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Sep 30 10:07:13 crc kubenswrapper[4730]: I0930 10:07:13.504177 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-4ftjh" Sep 30 10:07:13 crc kubenswrapper[4730]: I0930 10:07:13.514655 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 30 10:07:13 crc kubenswrapper[4730]: I0930 10:07:13.541666 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-gcmkp"] Sep 30 10:07:13 crc kubenswrapper[4730]: I0930 10:07:13.610455 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-9b5f9766f-2w2fq"] Sep 30 10:07:13 crc kubenswrapper[4730]: I0930 10:07:13.670150 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/64e70143-f93b-4808-b388-4faaa7f8e51d-logs\") pod \"placement-db-sync-gcmkp\" (UID: \"64e70143-f93b-4808-b388-4faaa7f8e51d\") " pod="openstack/placement-db-sync-gcmkp" Sep 30 10:07:13 crc kubenswrapper[4730]: I0930 10:07:13.670222 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1acf6558-7e16-4d00-b19f-c757db81dc58-scripts\") pod \"ceilometer-0\" (UID: \"1acf6558-7e16-4d00-b19f-c757db81dc58\") " pod="openstack/ceilometer-0" Sep 30 10:07:13 crc kubenswrapper[4730]: I0930 10:07:13.670257 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1acf6558-7e16-4d00-b19f-c757db81dc58-log-httpd\") pod \"ceilometer-0\" (UID: \"1acf6558-7e16-4d00-b19f-c757db81dc58\") " pod="openstack/ceilometer-0" Sep 30 10:07:13 crc kubenswrapper[4730]: I0930 10:07:13.670299 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/1acf6558-7e16-4d00-b19f-c757db81dc58-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"1acf6558-7e16-4d00-b19f-c757db81dc58\") " pod="openstack/ceilometer-0" Sep 30 10:07:13 crc 
Sep 30 10:07:13 crc kubenswrapper[4730]: I0930 10:07:13.670340 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-89g67\" (UniqueName: \"kubernetes.io/projected/64e70143-f93b-4808-b388-4faaa7f8e51d-kube-api-access-89g67\") pod \"placement-db-sync-gcmkp\" (UID: \"64e70143-f93b-4808-b388-4faaa7f8e51d\") " pod="openstack/placement-db-sync-gcmkp"
Sep 30 10:07:13 crc kubenswrapper[4730]: I0930 10:07:13.670366 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/64e70143-f93b-4808-b388-4faaa7f8e51d-scripts\") pod \"placement-db-sync-gcmkp\" (UID: \"64e70143-f93b-4808-b388-4faaa7f8e51d\") " pod="openstack/placement-db-sync-gcmkp"
Sep 30 10:07:13 crc kubenswrapper[4730]: I0930 10:07:13.670415 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64e70143-f93b-4808-b388-4faaa7f8e51d-combined-ca-bundle\") pod \"placement-db-sync-gcmkp\" (UID: \"64e70143-f93b-4808-b388-4faaa7f8e51d\") " pod="openstack/placement-db-sync-gcmkp"
Sep 30 10:07:13 crc kubenswrapper[4730]: I0930 10:07:13.670440 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kr2fv\" (UniqueName: \"kubernetes.io/projected/1acf6558-7e16-4d00-b19f-c757db81dc58-kube-api-access-kr2fv\") pod \"ceilometer-0\" (UID: \"1acf6558-7e16-4d00-b19f-c757db81dc58\") " pod="openstack/ceilometer-0"
Sep 30 10:07:13 crc kubenswrapper[4730]: I0930 10:07:13.670474 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1acf6558-7e16-4d00-b19f-c757db81dc58-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"1acf6558-7e16-4d00-b19f-c757db81dc58\") " pod="openstack/ceilometer-0"
Sep 30 10:07:13 crc kubenswrapper[4730]: I0930 10:07:13.670512 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/64e70143-f93b-4808-b388-4faaa7f8e51d-config-data\") pod \"placement-db-sync-gcmkp\" (UID: \"64e70143-f93b-4808-b388-4faaa7f8e51d\") " pod="openstack/placement-db-sync-gcmkp"
Sep 30 10:07:13 crc kubenswrapper[4730]: I0930 10:07:13.670531 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1acf6558-7e16-4d00-b19f-c757db81dc58-run-httpd\") pod \"ceilometer-0\" (UID: \"1acf6558-7e16-4d00-b19f-c757db81dc58\") " pod="openstack/ceilometer-0"
Sep 30 10:07:13 crc kubenswrapper[4730]: I0930 10:07:13.740646 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-lv4gb"
Sep 30 10:07:13 crc kubenswrapper[4730]: I0930 10:07:13.744339 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7d888b67c9-cpbvx"]
Sep 30 10:07:13 crc kubenswrapper[4730]: I0930 10:07:13.753535 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7d888b67c9-cpbvx"
Sep 30 10:07:13 crc kubenswrapper[4730]: I0930 10:07:13.755457 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7d888b67c9-cpbvx"]
Sep 30 10:07:13 crc kubenswrapper[4730]: I0930 10:07:13.772025 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1acf6558-7e16-4d00-b19f-c757db81dc58-run-httpd\") pod \"ceilometer-0\" (UID: \"1acf6558-7e16-4d00-b19f-c757db81dc58\") " pod="openstack/ceilometer-0"
Sep 30 10:07:13 crc kubenswrapper[4730]: I0930 10:07:13.772070 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/64e70143-f93b-4808-b388-4faaa7f8e51d-logs\") pod \"placement-db-sync-gcmkp\" (UID: \"64e70143-f93b-4808-b388-4faaa7f8e51d\") " pod="openstack/placement-db-sync-gcmkp"
Sep 30 10:07:13 crc kubenswrapper[4730]: I0930 10:07:13.772107 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1acf6558-7e16-4d00-b19f-c757db81dc58-scripts\") pod \"ceilometer-0\" (UID: \"1acf6558-7e16-4d00-b19f-c757db81dc58\") " pod="openstack/ceilometer-0"
Sep 30 10:07:13 crc kubenswrapper[4730]: I0930 10:07:13.772148 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1acf6558-7e16-4d00-b19f-c757db81dc58-log-httpd\") pod \"ceilometer-0\" (UID: \"1acf6558-7e16-4d00-b19f-c757db81dc58\") " pod="openstack/ceilometer-0"
Sep 30 10:07:13 crc kubenswrapper[4730]: I0930 10:07:13.772210 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/1acf6558-7e16-4d00-b19f-c757db81dc58-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"1acf6558-7e16-4d00-b19f-c757db81dc58\") " pod="openstack/ceilometer-0"
Sep 30 10:07:13 crc kubenswrapper[4730]: I0930 10:07:13.772230 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1acf6558-7e16-4d00-b19f-c757db81dc58-config-data\") pod \"ceilometer-0\" (UID: \"1acf6558-7e16-4d00-b19f-c757db81dc58\") " pod="openstack/ceilometer-0"
Sep 30 10:07:13 crc kubenswrapper[4730]: I0930 10:07:13.772260 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-89g67\" (UniqueName: \"kubernetes.io/projected/64e70143-f93b-4808-b388-4faaa7f8e51d-kube-api-access-89g67\") pod \"placement-db-sync-gcmkp\" (UID: \"64e70143-f93b-4808-b388-4faaa7f8e51d\") " pod="openstack/placement-db-sync-gcmkp"
Sep 30 10:07:13 crc kubenswrapper[4730]: I0930 10:07:13.772294 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/64e70143-f93b-4808-b388-4faaa7f8e51d-scripts\") pod \"placement-db-sync-gcmkp\" (UID: \"64e70143-f93b-4808-b388-4faaa7f8e51d\") " pod="openstack/placement-db-sync-gcmkp"
Sep 30 10:07:13 crc kubenswrapper[4730]: I0930 10:07:13.772335 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64e70143-f93b-4808-b388-4faaa7f8e51d-combined-ca-bundle\") pod \"placement-db-sync-gcmkp\" (UID: \"64e70143-f93b-4808-b388-4faaa7f8e51d\") " pod="openstack/placement-db-sync-gcmkp"
Sep 30 10:07:13 crc kubenswrapper[4730]: I0930 10:07:13.772368 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kr2fv\" (UniqueName: \"kubernetes.io/projected/1acf6558-7e16-4d00-b19f-c757db81dc58-kube-api-access-kr2fv\") pod \"ceilometer-0\" (UID: \"1acf6558-7e16-4d00-b19f-c757db81dc58\") " pod="openstack/ceilometer-0"
Sep 30 10:07:13 crc kubenswrapper[4730]: I0930 10:07:13.772404 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1acf6558-7e16-4d00-b19f-c757db81dc58-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"1acf6558-7e16-4d00-b19f-c757db81dc58\") " pod="openstack/ceilometer-0"
Sep 30 10:07:13 crc kubenswrapper[4730]: I0930 10:07:13.772447 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/64e70143-f93b-4808-b388-4faaa7f8e51d-config-data\") pod \"placement-db-sync-gcmkp\" (UID: \"64e70143-f93b-4808-b388-4faaa7f8e51d\") " pod="openstack/placement-db-sync-gcmkp"
Sep 30 10:07:13 crc kubenswrapper[4730]: I0930 10:07:13.772790 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1acf6558-7e16-4d00-b19f-c757db81dc58-log-httpd\") pod \"ceilometer-0\" (UID: \"1acf6558-7e16-4d00-b19f-c757db81dc58\") " pod="openstack/ceilometer-0"
Sep 30 10:07:13 crc kubenswrapper[4730]: I0930 10:07:13.772969 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1acf6558-7e16-4d00-b19f-c757db81dc58-run-httpd\") pod \"ceilometer-0\" (UID: \"1acf6558-7e16-4d00-b19f-c757db81dc58\") " pod="openstack/ceilometer-0"
Sep 30 10:07:13 crc kubenswrapper[4730]: I0930 10:07:13.773187 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/64e70143-f93b-4808-b388-4faaa7f8e51d-logs\") pod \"placement-db-sync-gcmkp\" (UID: \"64e70143-f93b-4808-b388-4faaa7f8e51d\") " pod="openstack/placement-db-sync-gcmkp"
Sep 30 10:07:13 crc kubenswrapper[4730]: I0930 10:07:13.776331 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/64e70143-f93b-4808-b388-4faaa7f8e51d-config-data\") pod \"placement-db-sync-gcmkp\" (UID: \"64e70143-f93b-4808-b388-4faaa7f8e51d\") " pod="openstack/placement-db-sync-gcmkp"
Sep 30 10:07:13 crc kubenswrapper[4730]: I0930 10:07:13.780240 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1acf6558-7e16-4d00-b19f-c757db81dc58-config-data\") pod \"ceilometer-0\" (UID: \"1acf6558-7e16-4d00-b19f-c757db81dc58\") " pod="openstack/ceilometer-0"
Sep 30 10:07:13 crc kubenswrapper[4730]: I0930 10:07:13.796324 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/64e70143-f93b-4808-b388-4faaa7f8e51d-scripts\") pod \"placement-db-sync-gcmkp\" (UID: \"64e70143-f93b-4808-b388-4faaa7f8e51d\") " pod="openstack/placement-db-sync-gcmkp"
Sep 30 10:07:13 crc kubenswrapper[4730]: I0930 10:07:13.803032 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1acf6558-7e16-4d00-b19f-c757db81dc58-scripts\") pod \"ceilometer-0\" (UID: \"1acf6558-7e16-4d00-b19f-c757db81dc58\") " pod="openstack/ceilometer-0"
Sep 30 10:07:13 crc kubenswrapper[4730]: I0930 10:07:13.803161 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64e70143-f93b-4808-b388-4faaa7f8e51d-combined-ca-bundle\") pod \"placement-db-sync-gcmkp\" (UID: \"64e70143-f93b-4808-b388-4faaa7f8e51d\") " pod="openstack/placement-db-sync-gcmkp"
"MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64e70143-f93b-4808-b388-4faaa7f8e51d-combined-ca-bundle\") pod \"placement-db-sync-gcmkp\" (UID: \"64e70143-f93b-4808-b388-4faaa7f8e51d\") " pod="openstack/placement-db-sync-gcmkp" Sep 30 10:07:13 crc kubenswrapper[4730]: I0930 10:07:13.814336 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kr2fv\" (UniqueName: \"kubernetes.io/projected/1acf6558-7e16-4d00-b19f-c757db81dc58-kube-api-access-kr2fv\") pod \"ceilometer-0\" (UID: \"1acf6558-7e16-4d00-b19f-c757db81dc58\") " pod="openstack/ceilometer-0" Sep 30 10:07:13 crc kubenswrapper[4730]: I0930 10:07:13.814466 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1acf6558-7e16-4d00-b19f-c757db81dc58-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"1acf6558-7e16-4d00-b19f-c757db81dc58\") " pod="openstack/ceilometer-0" Sep 30 10:07:13 crc kubenswrapper[4730]: I0930 10:07:13.814660 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/1acf6558-7e16-4d00-b19f-c757db81dc58-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"1acf6558-7e16-4d00-b19f-c757db81dc58\") " pod="openstack/ceilometer-0" Sep 30 10:07:13 crc kubenswrapper[4730]: I0930 10:07:13.816017 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-89g67\" (UniqueName: \"kubernetes.io/projected/64e70143-f93b-4808-b388-4faaa7f8e51d-kube-api-access-89g67\") pod \"placement-db-sync-gcmkp\" (UID: \"64e70143-f93b-4808-b388-4faaa7f8e51d\") " pod="openstack/placement-db-sync-gcmkp" Sep 30 10:07:13 crc kubenswrapper[4730]: I0930 10:07:13.843166 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 30 10:07:13 crc kubenswrapper[4730]: I0930 10:07:13.879724 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/18e54bc5-0acd-437b-bce0-7cb0147c4ab1-config\") pod \"dnsmasq-dns-7d888b67c9-cpbvx\" (UID: \"18e54bc5-0acd-437b-bce0-7cb0147c4ab1\") " pod="openstack/dnsmasq-dns-7d888b67c9-cpbvx" Sep 30 10:07:13 crc kubenswrapper[4730]: I0930 10:07:13.879920 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dm4bj\" (UniqueName: \"kubernetes.io/projected/18e54bc5-0acd-437b-bce0-7cb0147c4ab1-kube-api-access-dm4bj\") pod \"dnsmasq-dns-7d888b67c9-cpbvx\" (UID: \"18e54bc5-0acd-437b-bce0-7cb0147c4ab1\") " pod="openstack/dnsmasq-dns-7d888b67c9-cpbvx" Sep 30 10:07:13 crc kubenswrapper[4730]: I0930 10:07:13.879987 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/18e54bc5-0acd-437b-bce0-7cb0147c4ab1-dns-svc\") pod \"dnsmasq-dns-7d888b67c9-cpbvx\" (UID: \"18e54bc5-0acd-437b-bce0-7cb0147c4ab1\") " pod="openstack/dnsmasq-dns-7d888b67c9-cpbvx" Sep 30 10:07:13 crc kubenswrapper[4730]: I0930 10:07:13.880019 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/18e54bc5-0acd-437b-bce0-7cb0147c4ab1-ovsdbserver-sb\") pod \"dnsmasq-dns-7d888b67c9-cpbvx\" (UID: \"18e54bc5-0acd-437b-bce0-7cb0147c4ab1\") " pod="openstack/dnsmasq-dns-7d888b67c9-cpbvx" Sep 30 10:07:13 crc kubenswrapper[4730]: I0930 10:07:13.880088 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/18e54bc5-0acd-437b-bce0-7cb0147c4ab1-ovsdbserver-nb\") pod \"dnsmasq-dns-7d888b67c9-cpbvx\" (UID: \"18e54bc5-0acd-437b-bce0-7cb0147c4ab1\") " pod="openstack/dnsmasq-dns-7d888b67c9-cpbvx" Sep 30 10:07:13 crc kubenswrapper[4730]: I0930 10:07:13.970677 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-gcmkp" Sep 30 10:07:13 crc kubenswrapper[4730]: I0930 10:07:13.983630 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dm4bj\" (UniqueName: \"kubernetes.io/projected/18e54bc5-0acd-437b-bce0-7cb0147c4ab1-kube-api-access-dm4bj\") pod \"dnsmasq-dns-7d888b67c9-cpbvx\" (UID: \"18e54bc5-0acd-437b-bce0-7cb0147c4ab1\") " pod="openstack/dnsmasq-dns-7d888b67c9-cpbvx" Sep 30 10:07:13 crc kubenswrapper[4730]: I0930 10:07:13.983707 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/18e54bc5-0acd-437b-bce0-7cb0147c4ab1-dns-svc\") pod \"dnsmasq-dns-7d888b67c9-cpbvx\" (UID: \"18e54bc5-0acd-437b-bce0-7cb0147c4ab1\") " pod="openstack/dnsmasq-dns-7d888b67c9-cpbvx" Sep 30 10:07:13 crc kubenswrapper[4730]: I0930 10:07:13.983740 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/18e54bc5-0acd-437b-bce0-7cb0147c4ab1-ovsdbserver-sb\") pod \"dnsmasq-dns-7d888b67c9-cpbvx\" (UID: \"18e54bc5-0acd-437b-bce0-7cb0147c4ab1\") " pod="openstack/dnsmasq-dns-7d888b67c9-cpbvx" Sep 30 10:07:13 crc kubenswrapper[4730]: I0930 10:07:13.983799 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/18e54bc5-0acd-437b-bce0-7cb0147c4ab1-ovsdbserver-nb\") pod \"dnsmasq-dns-7d888b67c9-cpbvx\" (UID: \"18e54bc5-0acd-437b-bce0-7cb0147c4ab1\") " pod="openstack/dnsmasq-dns-7d888b67c9-cpbvx" Sep 30 10:07:13 crc kubenswrapper[4730]: I0930 10:07:13.983840 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/18e54bc5-0acd-437b-bce0-7cb0147c4ab1-config\") pod \"dnsmasq-dns-7d888b67c9-cpbvx\" (UID: \"18e54bc5-0acd-437b-bce0-7cb0147c4ab1\") " pod="openstack/dnsmasq-dns-7d888b67c9-cpbvx" Sep 30 10:07:13 crc kubenswrapper[4730]: I0930 10:07:13.988636 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/18e54bc5-0acd-437b-bce0-7cb0147c4ab1-config\") pod \"dnsmasq-dns-7d888b67c9-cpbvx\" (UID: \"18e54bc5-0acd-437b-bce0-7cb0147c4ab1\") " pod="openstack/dnsmasq-dns-7d888b67c9-cpbvx" Sep 30 10:07:13 crc kubenswrapper[4730]: I0930 10:07:13.989652 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/18e54bc5-0acd-437b-bce0-7cb0147c4ab1-dns-svc\") pod \"dnsmasq-dns-7d888b67c9-cpbvx\" (UID: \"18e54bc5-0acd-437b-bce0-7cb0147c4ab1\") " pod="openstack/dnsmasq-dns-7d888b67c9-cpbvx" Sep 30 10:07:13 crc kubenswrapper[4730]: I0930 10:07:13.990340 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/18e54bc5-0acd-437b-bce0-7cb0147c4ab1-ovsdbserver-sb\") pod \"dnsmasq-dns-7d888b67c9-cpbvx\" (UID: \"18e54bc5-0acd-437b-bce0-7cb0147c4ab1\") " pod="openstack/dnsmasq-dns-7d888b67c9-cpbvx" Sep 30 10:07:13 crc kubenswrapper[4730]: I0930 10:07:13.990972 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/18e54bc5-0acd-437b-bce0-7cb0147c4ab1-ovsdbserver-nb\") pod \"dnsmasq-dns-7d888b67c9-cpbvx\" (UID: \"18e54bc5-0acd-437b-bce0-7cb0147c4ab1\") " pod="openstack/dnsmasq-dns-7d888b67c9-cpbvx" Sep 30 10:07:14 crc kubenswrapper[4730]: I0930 10:07:14.023523 
Sep 30 10:07:14 crc kubenswrapper[4730]: I0930 10:07:14.292145 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7d888b67c9-cpbvx"
Sep 30 10:07:14 crc kubenswrapper[4730]: I0930 10:07:14.467141 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6749b57f47-5wl2r"]
Sep 30 10:07:14 crc kubenswrapper[4730]: I0930 10:07:14.625369 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-8780-account-create-xzbgl"]
Sep 30 10:07:14 crc kubenswrapper[4730]: W0930 10:07:14.627468 4730 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podce75a3c3_26fd_44b7_82c9_fe9edf0285fb.slice/crio-da5a394a85724f5e061a5d762974ef3e1102dfef44c146a39d6e5344ee061217 WatchSource:0}: Error finding container da5a394a85724f5e061a5d762974ef3e1102dfef44c146a39d6e5344ee061217: Status 404 returned error can't find the container with id da5a394a85724f5e061a5d762974ef3e1102dfef44c146a39d6e5344ee061217
Sep 30 10:07:14 crc kubenswrapper[4730]: I0930 10:07:14.995745 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-18fc-account-create-9j7xr"]
Sep 30 10:07:15 crc kubenswrapper[4730]: I0930 10:07:15.005750 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-gcmkp"]
Sep 30 10:07:15 crc kubenswrapper[4730]: I0930 10:07:15.008944 4730 generic.go:334] "Generic (PLEG): container finished" podID="3b3bf560-1015-4c1d-b4c5-2960599d7a84" containerID="a755cfc6cd1aaab58b47cdcc036ce9ceb35c6dab1d4441e5f47a1cc4b777945f" exitCode=0
Sep 30 10:07:15 crc kubenswrapper[4730]: I0930 10:07:15.008980 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6749b57f47-5wl2r" event={"ID":"3b3bf560-1015-4c1d-b4c5-2960599d7a84","Type":"ContainerDied","Data":"a755cfc6cd1aaab58b47cdcc036ce9ceb35c6dab1d4441e5f47a1cc4b777945f"}
Sep 30 10:07:15 crc kubenswrapper[4730]: I0930 10:07:15.009017 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6749b57f47-5wl2r" event={"ID":"3b3bf560-1015-4c1d-b4c5-2960599d7a84","Type":"ContainerStarted","Data":"0ff2a6ca3dbb7ef6bac7ec5bbac7a2f8285f11b5a7cc143503a7375427607aee"}
Sep 30 10:07:15 crc kubenswrapper[4730]: I0930 10:07:15.012443 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Sep 30 10:07:15 crc kubenswrapper[4730]: I0930 10:07:15.013685 4730 generic.go:334] "Generic (PLEG): container finished" podID="ce75a3c3-26fd-44b7-82c9-fe9edf0285fb" containerID="3c4f613823f91f03848acdb85599402e22d0c9e82601ce86c67cbc20cfb1abcd" exitCode=0
Sep 30 10:07:15 crc kubenswrapper[4730]: I0930 10:07:15.013758 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-8780-account-create-xzbgl" event={"ID":"ce75a3c3-26fd-44b7-82c9-fe9edf0285fb","Type":"ContainerDied","Data":"3c4f613823f91f03848acdb85599402e22d0c9e82601ce86c67cbc20cfb1abcd"}
Sep 30 10:07:15 crc kubenswrapper[4730]: I0930 10:07:15.013783 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-8780-account-create-xzbgl" event={"ID":"ce75a3c3-26fd-44b7-82c9-fe9edf0285fb","Type":"ContainerStarted","Data":"da5a394a85724f5e061a5d762974ef3e1102dfef44c146a39d6e5344ee061217"}
Sep 30 10:07:15 crc kubenswrapper[4730]: I0930 10:07:15.019275 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-lv4gb"]
Sep 30 10:07:15 crc kubenswrapper[4730]: I0930 10:07:15.035771 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-db-sync-47ddk" event={"ID":"f947945d-0a37-4104-95b5-d3437cd60556","Type":"ContainerStarted","Data":"0b14d0d5ef1f55874424dcf50a7700149498d19a754323e68e72ddac52d24ebe"}
Sep 30 10:07:15 crc kubenswrapper[4730]: I0930 10:07:15.092588 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-9b5f9766f-2w2fq"]
Sep 30 10:07:15 crc kubenswrapper[4730]: I0930 10:07:15.097137 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/watcher-db-sync-47ddk" podStartSLOduration=5.083109328 podStartE2EDuration="13.09711812s" podCreationTimestamp="2025-09-30 10:07:02 +0000 UTC" firstStartedPulling="2025-09-30 10:07:06.010086996 +0000 UTC m=+1070.343347009" lastFinishedPulling="2025-09-30 10:07:14.024095808 +0000 UTC m=+1078.357355801" observedRunningTime="2025-09-30 10:07:15.078544758 +0000 UTC m=+1079.411804751" watchObservedRunningTime="2025-09-30 10:07:15.09711812 +0000 UTC m=+1079.430378113"
Sep 30 10:07:15 crc kubenswrapper[4730]: I0930 10:07:15.112736 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7d888b67c9-cpbvx"]
Sep 30 10:07:15 crc kubenswrapper[4730]: I0930 10:07:15.406061 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Sep 30 10:07:15 crc kubenswrapper[4730]: I0930 10:07:15.445509 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6749b57f47-5wl2r"
Sep 30 10:07:15 crc kubenswrapper[4730]: I0930 10:07:15.523008 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3b3bf560-1015-4c1d-b4c5-2960599d7a84-config\") pod \"3b3bf560-1015-4c1d-b4c5-2960599d7a84\" (UID: \"3b3bf560-1015-4c1d-b4c5-2960599d7a84\") "
Sep 30 10:07:15 crc kubenswrapper[4730]: I0930 10:07:15.523103 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4rsjs\" (UniqueName: \"kubernetes.io/projected/3b3bf560-1015-4c1d-b4c5-2960599d7a84-kube-api-access-4rsjs\") pod \"3b3bf560-1015-4c1d-b4c5-2960599d7a84\" (UID: \"3b3bf560-1015-4c1d-b4c5-2960599d7a84\") "
Sep 30 10:07:15 crc kubenswrapper[4730]: I0930 10:07:15.523129 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3b3bf560-1015-4c1d-b4c5-2960599d7a84-ovsdbserver-sb\") pod \"3b3bf560-1015-4c1d-b4c5-2960599d7a84\" (UID: \"3b3bf560-1015-4c1d-b4c5-2960599d7a84\") "
Sep 30 10:07:15 crc kubenswrapper[4730]: I0930 10:07:15.523173 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3b3bf560-1015-4c1d-b4c5-2960599d7a84-dns-svc\") pod \"3b3bf560-1015-4c1d-b4c5-2960599d7a84\" (UID: \"3b3bf560-1015-4c1d-b4c5-2960599d7a84\") "
Sep 30 10:07:15 crc kubenswrapper[4730]: I0930 10:07:15.523192 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3b3bf560-1015-4c1d-b4c5-2960599d7a84-ovsdbserver-nb\") pod \"3b3bf560-1015-4c1d-b4c5-2960599d7a84\" (UID: \"3b3bf560-1015-4c1d-b4c5-2960599d7a84\") "
Sep 30 10:07:15 crc kubenswrapper[4730]: I0930 10:07:15.534221 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3b3bf560-1015-4c1d-b4c5-2960599d7a84-kube-api-access-4rsjs" (OuterVolumeSpecName: "kube-api-access-4rsjs") pod "3b3bf560-1015-4c1d-b4c5-2960599d7a84" (UID: "3b3bf560-1015-4c1d-b4c5-2960599d7a84"). InnerVolumeSpecName "kube-api-access-4rsjs". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 10:07:15 crc kubenswrapper[4730]: I0930 10:07:15.546894 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3b3bf560-1015-4c1d-b4c5-2960599d7a84-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "3b3bf560-1015-4c1d-b4c5-2960599d7a84" (UID: "3b3bf560-1015-4c1d-b4c5-2960599d7a84"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 10:07:15 crc kubenswrapper[4730]: I0930 10:07:15.572668 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3b3bf560-1015-4c1d-b4c5-2960599d7a84-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "3b3bf560-1015-4c1d-b4c5-2960599d7a84" (UID: "3b3bf560-1015-4c1d-b4c5-2960599d7a84"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 10:07:15 crc kubenswrapper[4730]: I0930 10:07:15.578550 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3b3bf560-1015-4c1d-b4c5-2960599d7a84-config" (OuterVolumeSpecName: "config") pod "3b3bf560-1015-4c1d-b4c5-2960599d7a84" (UID: "3b3bf560-1015-4c1d-b4c5-2960599d7a84"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 10:07:15 crc kubenswrapper[4730]: I0930 10:07:15.588855 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3b3bf560-1015-4c1d-b4c5-2960599d7a84-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "3b3bf560-1015-4c1d-b4c5-2960599d7a84" (UID: "3b3bf560-1015-4c1d-b4c5-2960599d7a84"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 10:07:15 crc kubenswrapper[4730]: I0930 10:07:15.626378 4730 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3b3bf560-1015-4c1d-b4c5-2960599d7a84-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 10:07:15 crc kubenswrapper[4730]: I0930 10:07:15.626403 4730 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3b3bf560-1015-4c1d-b4c5-2960599d7a84-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 30 10:07:15 crc kubenswrapper[4730]: I0930 10:07:15.626412 4730 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3b3bf560-1015-4c1d-b4c5-2960599d7a84-config\") on node \"crc\" DevicePath \"\"" Sep 30 10:07:15 crc kubenswrapper[4730]: I0930 10:07:15.626422 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4rsjs\" (UniqueName: \"kubernetes.io/projected/3b3bf560-1015-4c1d-b4c5-2960599d7a84-kube-api-access-4rsjs\") on node \"crc\" DevicePath \"\"" Sep 30 10:07:15 crc kubenswrapper[4730]: I0930 10:07:15.626431 4730 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3b3bf560-1015-4c1d-b4c5-2960599d7a84-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 30 10:07:16 crc kubenswrapper[4730]: I0930 10:07:16.061025 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-gcmkp" event={"ID":"64e70143-f93b-4808-b388-4faaa7f8e51d","Type":"ContainerStarted","Data":"3724328affcb9ce63fbdb09f1ae3c687bb9b0cd8830693d880c43f376749d5bc"} Sep 30 10:07:16 crc kubenswrapper[4730]: I0930 10:07:16.062969 4730 generic.go:334] "Generic (PLEG): container finished" podID="18e54bc5-0acd-437b-bce0-7cb0147c4ab1" containerID="0ffce5943abd58bf6d06872c1b8f6ed80c67947ad6ee098348b4d7ef2101da90" exitCode=0 Sep 30 10:07:16 crc kubenswrapper[4730]: I0930 10:07:16.063022 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7d888b67c9-cpbvx" event={"ID":"18e54bc5-0acd-437b-bce0-7cb0147c4ab1","Type":"ContainerDied","Data":"0ffce5943abd58bf6d06872c1b8f6ed80c67947ad6ee098348b4d7ef2101da90"} Sep 30 10:07:16 crc kubenswrapper[4730]: I0930 10:07:16.063047 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7d888b67c9-cpbvx" event={"ID":"18e54bc5-0acd-437b-bce0-7cb0147c4ab1","Type":"ContainerStarted","Data":"9a1f27061090bae89318daee89b1bbffc830268d7cd79f9358c08b4737c95818"} Sep 30 10:07:16 crc kubenswrapper[4730]: I0930 10:07:16.105535 4730 generic.go:334] "Generic (PLEG): container finished" podID="4bee9a35-25c0-44b5-8160-a2787eeea901" containerID="d2f35c02d2be018649cdd9cfc8229e1ca2014710bd17f7dda84aa85f84ed5ac9" exitCode=0 Sep 30 10:07:16 crc kubenswrapper[4730]: I0930 10:07:16.105824 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-18fc-account-create-9j7xr" 
event={"ID":"4bee9a35-25c0-44b5-8160-a2787eeea901","Type":"ContainerDied","Data":"d2f35c02d2be018649cdd9cfc8229e1ca2014710bd17f7dda84aa85f84ed5ac9"} Sep 30 10:07:16 crc kubenswrapper[4730]: I0930 10:07:16.105914 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-18fc-account-create-9j7xr" event={"ID":"4bee9a35-25c0-44b5-8160-a2787eeea901","Type":"ContainerStarted","Data":"070cab18281b927409e1b1075342d1743631dbe0cb2ab89b894671a890bc27ef"} Sep 30 10:07:16 crc kubenswrapper[4730]: I0930 10:07:16.112232 4730 generic.go:334] "Generic (PLEG): container finished" podID="7b63b221-3873-4118-a434-cad816a03404" containerID="9a6201f0cc1b769e80fe266bf19f0730dc22f986226083cf9e1150fa5438ea26" exitCode=0 Sep 30 10:07:16 crc kubenswrapper[4730]: I0930 10:07:16.112373 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-9b5f9766f-2w2fq" event={"ID":"7b63b221-3873-4118-a434-cad816a03404","Type":"ContainerDied","Data":"9a6201f0cc1b769e80fe266bf19f0730dc22f986226083cf9e1150fa5438ea26"} Sep 30 10:07:16 crc kubenswrapper[4730]: I0930 10:07:16.112454 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-9b5f9766f-2w2fq" event={"ID":"7b63b221-3873-4118-a434-cad816a03404","Type":"ContainerStarted","Data":"4f6e84fadfc43bef0f8666888b8c96010e3d2cf5f7e1dba6b07d946f0ae13e8a"} Sep 30 10:07:16 crc kubenswrapper[4730]: I0930 10:07:16.176590 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1acf6558-7e16-4d00-b19f-c757db81dc58","Type":"ContainerStarted","Data":"733792964d1b647379ec2c992f12fc032da7cf1cc94dae88578bb2138b614226"} Sep 30 10:07:16 crc kubenswrapper[4730]: I0930 10:07:16.178389 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-lv4gb" event={"ID":"1b9e2246-a00f-4a7f-8cbb-57a5f11d6605","Type":"ContainerStarted","Data":"3851eb4410c5f00bd60ff66065928ebd4542b4e668df64c96b4a174a4c77daf0"} Sep 30 10:07:16 crc kubenswrapper[4730]: I0930 10:07:16.178436 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-lv4gb" event={"ID":"1b9e2246-a00f-4a7f-8cbb-57a5f11d6605","Type":"ContainerStarted","Data":"f648430fa500810133db8877898d4b7a58b26eb39ee5db42987432f45d8a4fa1"} Sep 30 10:07:16 crc kubenswrapper[4730]: I0930 10:07:16.181523 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6749b57f47-5wl2r" Sep 30 10:07:16 crc kubenswrapper[4730]: I0930 10:07:16.183203 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6749b57f47-5wl2r" event={"ID":"3b3bf560-1015-4c1d-b4c5-2960599d7a84","Type":"ContainerDied","Data":"0ff2a6ca3dbb7ef6bac7ec5bbac7a2f8285f11b5a7cc143503a7375427607aee"} Sep 30 10:07:16 crc kubenswrapper[4730]: I0930 10:07:16.189059 4730 scope.go:117] "RemoveContainer" containerID="a755cfc6cd1aaab58b47cdcc036ce9ceb35c6dab1d4441e5f47a1cc4b777945f" Sep 30 10:07:16 crc kubenswrapper[4730]: I0930 10:07:16.228052 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-lv4gb" podStartSLOduration=3.228027735 podStartE2EDuration="3.228027735s" podCreationTimestamp="2025-09-30 10:07:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 10:07:16.224098025 +0000 UTC m=+1080.557358008" watchObservedRunningTime="2025-09-30 10:07:16.228027735 +0000 UTC m=+1080.561287728" Sep 30 10:07:16 crc kubenswrapper[4730]: I0930 10:07:16.346097 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6749b57f47-5wl2r"] Sep 30 10:07:16 crc kubenswrapper[4730]: I0930 10:07:16.356595 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6749b57f47-5wl2r"] Sep 30 10:07:16 crc kubenswrapper[4730]: I0930 10:07:16.400466 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3b3bf560-1015-4c1d-b4c5-2960599d7a84" path="/var/lib/kubelet/pods/3b3bf560-1015-4c1d-b4c5-2960599d7a84/volumes" Sep 30 10:07:16 crc kubenswrapper[4730]: I0930 10:07:16.764907 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-8780-account-create-xzbgl" Sep 30 10:07:16 crc kubenswrapper[4730]: I0930 10:07:16.873756 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tnxjt\" (UniqueName: \"kubernetes.io/projected/ce75a3c3-26fd-44b7-82c9-fe9edf0285fb-kube-api-access-tnxjt\") pod \"ce75a3c3-26fd-44b7-82c9-fe9edf0285fb\" (UID: \"ce75a3c3-26fd-44b7-82c9-fe9edf0285fb\") " Sep 30 10:07:16 crc kubenswrapper[4730]: I0930 10:07:16.881977 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ce75a3c3-26fd-44b7-82c9-fe9edf0285fb-kube-api-access-tnxjt" (OuterVolumeSpecName: "kube-api-access-tnxjt") pod "ce75a3c3-26fd-44b7-82c9-fe9edf0285fb" (UID: "ce75a3c3-26fd-44b7-82c9-fe9edf0285fb"). InnerVolumeSpecName "kube-api-access-tnxjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:07:16 crc kubenswrapper[4730]: I0930 10:07:16.904596 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-9b5f9766f-2w2fq" Sep 30 10:07:16 crc kubenswrapper[4730]: I0930 10:07:16.976267 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tnxjt\" (UniqueName: \"kubernetes.io/projected/ce75a3c3-26fd-44b7-82c9-fe9edf0285fb-kube-api-access-tnxjt\") on node \"crc\" DevicePath \"\"" Sep 30 10:07:17 crc kubenswrapper[4730]: I0930 10:07:17.077576 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-78n6d\" (UniqueName: \"kubernetes.io/projected/7b63b221-3873-4118-a434-cad816a03404-kube-api-access-78n6d\") pod \"7b63b221-3873-4118-a434-cad816a03404\" (UID: \"7b63b221-3873-4118-a434-cad816a03404\") " Sep 30 10:07:17 crc kubenswrapper[4730]: I0930 10:07:17.077700 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7b63b221-3873-4118-a434-cad816a03404-ovsdbserver-sb\") pod \"7b63b221-3873-4118-a434-cad816a03404\" (UID: \"7b63b221-3873-4118-a434-cad816a03404\") " Sep 30 10:07:17 crc kubenswrapper[4730]: I0930 10:07:17.077743 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7b63b221-3873-4118-a434-cad816a03404-dns-svc\") pod \"7b63b221-3873-4118-a434-cad816a03404\" (UID: \"7b63b221-3873-4118-a434-cad816a03404\") " Sep 30 10:07:17 crc kubenswrapper[4730]: I0930 10:07:17.077763 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7b63b221-3873-4118-a434-cad816a03404-ovsdbserver-nb\") pod \"7b63b221-3873-4118-a434-cad816a03404\" (UID: \"7b63b221-3873-4118-a434-cad816a03404\") " Sep 30 10:07:17 crc kubenswrapper[4730]: I0930 10:07:17.077817 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7b63b221-3873-4118-a434-cad816a03404-config\") pod \"7b63b221-3873-4118-a434-cad816a03404\" (UID: \"7b63b221-3873-4118-a434-cad816a03404\") " Sep 30 10:07:17 crc kubenswrapper[4730]: I0930 10:07:17.081980 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7b63b221-3873-4118-a434-cad816a03404-kube-api-access-78n6d" (OuterVolumeSpecName: "kube-api-access-78n6d") pod "7b63b221-3873-4118-a434-cad816a03404" (UID: "7b63b221-3873-4118-a434-cad816a03404"). InnerVolumeSpecName "kube-api-access-78n6d". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:07:17 crc kubenswrapper[4730]: I0930 10:07:17.099655 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7b63b221-3873-4118-a434-cad816a03404-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "7b63b221-3873-4118-a434-cad816a03404" (UID: "7b63b221-3873-4118-a434-cad816a03404"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 10:07:17 crc kubenswrapper[4730]: I0930 10:07:17.100394 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7b63b221-3873-4118-a434-cad816a03404-config" (OuterVolumeSpecName: "config") pod "7b63b221-3873-4118-a434-cad816a03404" (UID: "7b63b221-3873-4118-a434-cad816a03404"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 10:07:17 crc kubenswrapper[4730]: I0930 10:07:17.102534 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7b63b221-3873-4118-a434-cad816a03404-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "7b63b221-3873-4118-a434-cad816a03404" (UID: "7b63b221-3873-4118-a434-cad816a03404"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 10:07:17 crc kubenswrapper[4730]: I0930 10:07:17.103645 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7b63b221-3873-4118-a434-cad816a03404-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "7b63b221-3873-4118-a434-cad816a03404" (UID: "7b63b221-3873-4118-a434-cad816a03404"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 10:07:17 crc kubenswrapper[4730]: I0930 10:07:17.179429 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-78n6d\" (UniqueName: \"kubernetes.io/projected/7b63b221-3873-4118-a434-cad816a03404-kube-api-access-78n6d\") on node \"crc\" DevicePath \"\"" Sep 30 10:07:17 crc kubenswrapper[4730]: I0930 10:07:17.179735 4730 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7b63b221-3873-4118-a434-cad816a03404-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 30 10:07:17 crc kubenswrapper[4730]: I0930 10:07:17.179745 4730 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7b63b221-3873-4118-a434-cad816a03404-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 10:07:17 crc kubenswrapper[4730]: I0930 10:07:17.179755 4730 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7b63b221-3873-4118-a434-cad816a03404-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 30 10:07:17 crc kubenswrapper[4730]: I0930 10:07:17.179763 4730 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7b63b221-3873-4118-a434-cad816a03404-config\") on node \"crc\" DevicePath \"\"" Sep 30 10:07:17 crc kubenswrapper[4730]: I0930 10:07:17.198662 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7d888b67c9-cpbvx" event={"ID":"18e54bc5-0acd-437b-bce0-7cb0147c4ab1","Type":"ContainerStarted","Data":"e68e1c63f375fee1c775a9cd351f8f67166a04f049890483cfa34efa17456e74"} Sep 30 10:07:17 crc kubenswrapper[4730]: I0930 10:07:17.198818 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7d888b67c9-cpbvx" Sep 30 10:07:17 crc kubenswrapper[4730]: I0930 10:07:17.200278 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-8780-account-create-xzbgl" event={"ID":"ce75a3c3-26fd-44b7-82c9-fe9edf0285fb","Type":"ContainerDied","Data":"da5a394a85724f5e061a5d762974ef3e1102dfef44c146a39d6e5344ee061217"} Sep 30 10:07:17 crc kubenswrapper[4730]: I0930 10:07:17.200301 4730 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="da5a394a85724f5e061a5d762974ef3e1102dfef44c146a39d6e5344ee061217" Sep 30 10:07:17 crc kubenswrapper[4730]: I0930 10:07:17.200375 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-8780-account-create-xzbgl" Sep 30 10:07:17 crc kubenswrapper[4730]: I0930 10:07:17.214319 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-9b5f9766f-2w2fq" event={"ID":"7b63b221-3873-4118-a434-cad816a03404","Type":"ContainerDied","Data":"4f6e84fadfc43bef0f8666888b8c96010e3d2cf5f7e1dba6b07d946f0ae13e8a"} Sep 30 10:07:17 crc kubenswrapper[4730]: I0930 10:07:17.214387 4730 scope.go:117] "RemoveContainer" containerID="9a6201f0cc1b769e80fe266bf19f0730dc22f986226083cf9e1150fa5438ea26" Sep 30 10:07:17 crc kubenswrapper[4730]: I0930 10:07:17.214418 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-9b5f9766f-2w2fq" Sep 30 10:07:17 crc kubenswrapper[4730]: I0930 10:07:17.280836 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7d888b67c9-cpbvx" podStartSLOduration=4.278592906 podStartE2EDuration="4.278592906s" podCreationTimestamp="2025-09-30 10:07:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 10:07:17.227092647 +0000 UTC m=+1081.560352640" watchObservedRunningTime="2025-09-30 10:07:17.278592906 +0000 UTC m=+1081.611852909" Sep 30 10:07:17 crc kubenswrapper[4730]: I0930 10:07:17.302365 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-9b5f9766f-2w2fq"] Sep 30 10:07:17 crc kubenswrapper[4730]: I0930 10:07:17.323188 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-9b5f9766f-2w2fq"] Sep 30 10:07:18 crc kubenswrapper[4730]: I0930 10:07:18.404311 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7b63b221-3873-4118-a434-cad816a03404" path="/var/lib/kubelet/pods/7b63b221-3873-4118-a434-cad816a03404/volumes" Sep 30 10:07:19 crc kubenswrapper[4730]: I0930 10:07:19.575331 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-daf3-account-create-5rdlr"] Sep 30 10:07:19 crc kubenswrapper[4730]: E0930 10:07:19.575769 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3b3bf560-1015-4c1d-b4c5-2960599d7a84" containerName="init" Sep 30 10:07:19 crc kubenswrapper[4730]: I0930 10:07:19.575787 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="3b3bf560-1015-4c1d-b4c5-2960599d7a84" containerName="init" Sep 30 10:07:19 crc kubenswrapper[4730]: E0930 10:07:19.575829 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7b63b221-3873-4118-a434-cad816a03404" containerName="init" Sep 30 10:07:19 crc kubenswrapper[4730]: I0930 10:07:19.575838 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="7b63b221-3873-4118-a434-cad816a03404" containerName="init" Sep 30 10:07:19 crc kubenswrapper[4730]: E0930 10:07:19.575850 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ce75a3c3-26fd-44b7-82c9-fe9edf0285fb" containerName="mariadb-account-create" Sep 30 10:07:19 crc kubenswrapper[4730]: I0930 10:07:19.575858 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="ce75a3c3-26fd-44b7-82c9-fe9edf0285fb" containerName="mariadb-account-create" Sep 30 10:07:19 crc kubenswrapper[4730]: I0930 10:07:19.576040 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="3b3bf560-1015-4c1d-b4c5-2960599d7a84" containerName="init" Sep 30 10:07:19 crc kubenswrapper[4730]: I0930 10:07:19.576062 4730 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="ce75a3c3-26fd-44b7-82c9-fe9edf0285fb" containerName="mariadb-account-create" Sep 30 10:07:19 crc kubenswrapper[4730]: I0930 10:07:19.576072 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="7b63b221-3873-4118-a434-cad816a03404" containerName="init" Sep 30 10:07:19 crc kubenswrapper[4730]: I0930 10:07:19.576640 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-daf3-account-create-5rdlr" Sep 30 10:07:19 crc kubenswrapper[4730]: I0930 10:07:19.581116 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret" Sep 30 10:07:19 crc kubenswrapper[4730]: I0930 10:07:19.587170 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-daf3-account-create-5rdlr"] Sep 30 10:07:19 crc kubenswrapper[4730]: I0930 10:07:19.649575 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-sync-fj5rw"] Sep 30 10:07:19 crc kubenswrapper[4730]: I0930 10:07:19.651707 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-fj5rw" Sep 30 10:07:19 crc kubenswrapper[4730]: I0930 10:07:19.653952 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-zmnk5" Sep 30 10:07:19 crc kubenswrapper[4730]: I0930 10:07:19.654174 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Sep 30 10:07:19 crc kubenswrapper[4730]: I0930 10:07:19.663354 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-fj5rw"] Sep 30 10:07:19 crc kubenswrapper[4730]: I0930 10:07:19.734873 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gblqs\" (UniqueName: \"kubernetes.io/projected/4ccc082d-d035-4fa0-9a6b-76fa8b89c055-kube-api-access-gblqs\") pod \"neutron-daf3-account-create-5rdlr\" (UID: \"4ccc082d-d035-4fa0-9a6b-76fa8b89c055\") " pod="openstack/neutron-daf3-account-create-5rdlr" Sep 30 10:07:19 crc kubenswrapper[4730]: I0930 10:07:19.836523 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nk94d\" (UniqueName: \"kubernetes.io/projected/7fc71401-9b3c-455c-9a56-28c6fcbde898-kube-api-access-nk94d\") pod \"barbican-db-sync-fj5rw\" (UID: \"7fc71401-9b3c-455c-9a56-28c6fcbde898\") " pod="openstack/barbican-db-sync-fj5rw" Sep 30 10:07:19 crc kubenswrapper[4730]: I0930 10:07:19.837390 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7fc71401-9b3c-455c-9a56-28c6fcbde898-combined-ca-bundle\") pod \"barbican-db-sync-fj5rw\" (UID: \"7fc71401-9b3c-455c-9a56-28c6fcbde898\") " pod="openstack/barbican-db-sync-fj5rw" Sep 30 10:07:19 crc kubenswrapper[4730]: I0930 10:07:19.837436 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gblqs\" (UniqueName: \"kubernetes.io/projected/4ccc082d-d035-4fa0-9a6b-76fa8b89c055-kube-api-access-gblqs\") pod \"neutron-daf3-account-create-5rdlr\" (UID: \"4ccc082d-d035-4fa0-9a6b-76fa8b89c055\") " pod="openstack/neutron-daf3-account-create-5rdlr" Sep 30 10:07:19 crc kubenswrapper[4730]: I0930 10:07:19.837514 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: 
\"kubernetes.io/secret/7fc71401-9b3c-455c-9a56-28c6fcbde898-db-sync-config-data\") pod \"barbican-db-sync-fj5rw\" (UID: \"7fc71401-9b3c-455c-9a56-28c6fcbde898\") " pod="openstack/barbican-db-sync-fj5rw" Sep 30 10:07:19 crc kubenswrapper[4730]: I0930 10:07:19.865419 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gblqs\" (UniqueName: \"kubernetes.io/projected/4ccc082d-d035-4fa0-9a6b-76fa8b89c055-kube-api-access-gblqs\") pod \"neutron-daf3-account-create-5rdlr\" (UID: \"4ccc082d-d035-4fa0-9a6b-76fa8b89c055\") " pod="openstack/neutron-daf3-account-create-5rdlr" Sep 30 10:07:19 crc kubenswrapper[4730]: I0930 10:07:19.939534 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nk94d\" (UniqueName: \"kubernetes.io/projected/7fc71401-9b3c-455c-9a56-28c6fcbde898-kube-api-access-nk94d\") pod \"barbican-db-sync-fj5rw\" (UID: \"7fc71401-9b3c-455c-9a56-28c6fcbde898\") " pod="openstack/barbican-db-sync-fj5rw" Sep 30 10:07:19 crc kubenswrapper[4730]: I0930 10:07:19.939664 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7fc71401-9b3c-455c-9a56-28c6fcbde898-combined-ca-bundle\") pod \"barbican-db-sync-fj5rw\" (UID: \"7fc71401-9b3c-455c-9a56-28c6fcbde898\") " pod="openstack/barbican-db-sync-fj5rw" Sep 30 10:07:19 crc kubenswrapper[4730]: I0930 10:07:19.939793 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/7fc71401-9b3c-455c-9a56-28c6fcbde898-db-sync-config-data\") pod \"barbican-db-sync-fj5rw\" (UID: \"7fc71401-9b3c-455c-9a56-28c6fcbde898\") " pod="openstack/barbican-db-sync-fj5rw" Sep 30 10:07:19 crc kubenswrapper[4730]: I0930 10:07:19.944040 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/7fc71401-9b3c-455c-9a56-28c6fcbde898-db-sync-config-data\") pod \"barbican-db-sync-fj5rw\" (UID: \"7fc71401-9b3c-455c-9a56-28c6fcbde898\") " pod="openstack/barbican-db-sync-fj5rw" Sep 30 10:07:19 crc kubenswrapper[4730]: I0930 10:07:19.944764 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7fc71401-9b3c-455c-9a56-28c6fcbde898-combined-ca-bundle\") pod \"barbican-db-sync-fj5rw\" (UID: \"7fc71401-9b3c-455c-9a56-28c6fcbde898\") " pod="openstack/barbican-db-sync-fj5rw" Sep 30 10:07:19 crc kubenswrapper[4730]: I0930 10:07:19.953260 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-daf3-account-create-5rdlr" Sep 30 10:07:19 crc kubenswrapper[4730]: I0930 10:07:19.960177 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nk94d\" (UniqueName: \"kubernetes.io/projected/7fc71401-9b3c-455c-9a56-28c6fcbde898-kube-api-access-nk94d\") pod \"barbican-db-sync-fj5rw\" (UID: \"7fc71401-9b3c-455c-9a56-28c6fcbde898\") " pod="openstack/barbican-db-sync-fj5rw" Sep 30 10:07:19 crc kubenswrapper[4730]: I0930 10:07:19.971074 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-fj5rw" Sep 30 10:07:20 crc kubenswrapper[4730]: I0930 10:07:20.256881 4730 generic.go:334] "Generic (PLEG): container finished" podID="f947945d-0a37-4104-95b5-d3437cd60556" containerID="0b14d0d5ef1f55874424dcf50a7700149498d19a754323e68e72ddac52d24ebe" exitCode=0 Sep 30 10:07:20 crc kubenswrapper[4730]: I0930 10:07:20.256920 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-db-sync-47ddk" event={"ID":"f947945d-0a37-4104-95b5-d3437cd60556","Type":"ContainerDied","Data":"0b14d0d5ef1f55874424dcf50a7700149498d19a754323e68e72ddac52d24ebe"} Sep 30 10:07:21 crc kubenswrapper[4730]: I0930 10:07:21.266009 4730 generic.go:334] "Generic (PLEG): container finished" podID="1b9e2246-a00f-4a7f-8cbb-57a5f11d6605" containerID="3851eb4410c5f00bd60ff66065928ebd4542b4e668df64c96b4a174a4c77daf0" exitCode=0 Sep 30 10:07:21 crc kubenswrapper[4730]: I0930 10:07:21.266091 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-lv4gb" event={"ID":"1b9e2246-a00f-4a7f-8cbb-57a5f11d6605","Type":"ContainerDied","Data":"3851eb4410c5f00bd60ff66065928ebd4542b4e668df64c96b4a174a4c77daf0"} Sep 30 10:07:22 crc kubenswrapper[4730]: I0930 10:07:22.082029 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-18fc-account-create-9j7xr" Sep 30 10:07:22 crc kubenswrapper[4730]: I0930 10:07:22.117464 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-db-sync-47ddk" Sep 30 10:07:22 crc kubenswrapper[4730]: I0930 10:07:22.178178 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jzn84\" (UniqueName: \"kubernetes.io/projected/4bee9a35-25c0-44b5-8160-a2787eeea901-kube-api-access-jzn84\") pod \"4bee9a35-25c0-44b5-8160-a2787eeea901\" (UID: \"4bee9a35-25c0-44b5-8160-a2787eeea901\") " Sep 30 10:07:22 crc kubenswrapper[4730]: I0930 10:07:22.184314 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bee9a35-25c0-44b5-8160-a2787eeea901-kube-api-access-jzn84" (OuterVolumeSpecName: "kube-api-access-jzn84") pod "4bee9a35-25c0-44b5-8160-a2787eeea901" (UID: "4bee9a35-25c0-44b5-8160-a2787eeea901"). InnerVolumeSpecName "kube-api-access-jzn84". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:07:22 crc kubenswrapper[4730]: I0930 10:07:22.249458 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-fj5rw"] Sep 30 10:07:22 crc kubenswrapper[4730]: W0930 10:07:22.252359 4730 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7fc71401_9b3c_455c_9a56_28c6fcbde898.slice/crio-87f4815ad181706f267a70cc48afa9e853a8807787681388792bed2f60778276 WatchSource:0}: Error finding container 87f4815ad181706f267a70cc48afa9e853a8807787681388792bed2f60778276: Status 404 returned error can't find the container with id 87f4815ad181706f267a70cc48afa9e853a8807787681388792bed2f60778276 Sep 30 10:07:22 crc kubenswrapper[4730]: I0930 10:07:22.280160 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f947945d-0a37-4104-95b5-d3437cd60556-combined-ca-bundle\") pod \"f947945d-0a37-4104-95b5-d3437cd60556\" (UID: \"f947945d-0a37-4104-95b5-d3437cd60556\") " Sep 30 10:07:22 crc kubenswrapper[4730]: I0930 10:07:22.280358 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-45tk8\" (UniqueName: \"kubernetes.io/projected/f947945d-0a37-4104-95b5-d3437cd60556-kube-api-access-45tk8\") pod \"f947945d-0a37-4104-95b5-d3437cd60556\" (UID: \"f947945d-0a37-4104-95b5-d3437cd60556\") " Sep 30 10:07:22 crc kubenswrapper[4730]: I0930 10:07:22.280388 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/f947945d-0a37-4104-95b5-d3437cd60556-db-sync-config-data\") pod \"f947945d-0a37-4104-95b5-d3437cd60556\" (UID: \"f947945d-0a37-4104-95b5-d3437cd60556\") " Sep 30 10:07:22 crc kubenswrapper[4730]: I0930 10:07:22.280440 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f947945d-0a37-4104-95b5-d3437cd60556-config-data\") pod \"f947945d-0a37-4104-95b5-d3437cd60556\" (UID: \"f947945d-0a37-4104-95b5-d3437cd60556\") " Sep 30 10:07:22 crc kubenswrapper[4730]: I0930 10:07:22.280485 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-db-sync-47ddk" event={"ID":"f947945d-0a37-4104-95b5-d3437cd60556","Type":"ContainerDied","Data":"2f4bfa728642e4fccd50308e0416ce0ac8d2ac0bccb31204d47040ac119502d6"} Sep 30 10:07:22 crc kubenswrapper[4730]: I0930 10:07:22.280545 4730 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2f4bfa728642e4fccd50308e0416ce0ac8d2ac0bccb31204d47040ac119502d6" Sep 30 10:07:22 crc kubenswrapper[4730]: I0930 10:07:22.280686 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/watcher-db-sync-47ddk" Sep 30 10:07:22 crc kubenswrapper[4730]: I0930 10:07:22.281263 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jzn84\" (UniqueName: \"kubernetes.io/projected/4bee9a35-25c0-44b5-8160-a2787eeea901-kube-api-access-jzn84\") on node \"crc\" DevicePath \"\"" Sep 30 10:07:22 crc kubenswrapper[4730]: I0930 10:07:22.284200 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1acf6558-7e16-4d00-b19f-c757db81dc58","Type":"ContainerStarted","Data":"dd1ce5807810f86dce08a7d2b15eca3cbad8bbaffbafef2cf992195da82d0d24"} Sep 30 10:07:22 crc kubenswrapper[4730]: I0930 10:07:22.285498 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f947945d-0a37-4104-95b5-d3437cd60556-kube-api-access-45tk8" (OuterVolumeSpecName: "kube-api-access-45tk8") pod "f947945d-0a37-4104-95b5-d3437cd60556" (UID: "f947945d-0a37-4104-95b5-d3437cd60556"). InnerVolumeSpecName "kube-api-access-45tk8". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:07:22 crc kubenswrapper[4730]: I0930 10:07:22.286212 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-gcmkp" event={"ID":"64e70143-f93b-4808-b388-4faaa7f8e51d","Type":"ContainerStarted","Data":"bb516522601ec0b341f35b611fbd192370924e388303d36a652911ab98f7ee13"} Sep 30 10:07:22 crc kubenswrapper[4730]: I0930 10:07:22.292276 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f947945d-0a37-4104-95b5-d3437cd60556-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "f947945d-0a37-4104-95b5-d3437cd60556" (UID: "f947945d-0a37-4104-95b5-d3437cd60556"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:07:22 crc kubenswrapper[4730]: I0930 10:07:22.292464 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-fj5rw" event={"ID":"7fc71401-9b3c-455c-9a56-28c6fcbde898","Type":"ContainerStarted","Data":"87f4815ad181706f267a70cc48afa9e853a8807787681388792bed2f60778276"} Sep 30 10:07:22 crc kubenswrapper[4730]: I0930 10:07:22.299152 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-18fc-account-create-9j7xr" Sep 30 10:07:22 crc kubenswrapper[4730]: I0930 10:07:22.299776 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-18fc-account-create-9j7xr" event={"ID":"4bee9a35-25c0-44b5-8160-a2787eeea901","Type":"ContainerDied","Data":"070cab18281b927409e1b1075342d1743631dbe0cb2ab89b894671a890bc27ef"} Sep 30 10:07:22 crc kubenswrapper[4730]: I0930 10:07:22.299966 4730 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="070cab18281b927409e1b1075342d1743631dbe0cb2ab89b894671a890bc27ef" Sep 30 10:07:22 crc kubenswrapper[4730]: I0930 10:07:22.330583 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-sync-gcmkp" podStartSLOduration=2.53230622 podStartE2EDuration="9.330562556s" podCreationTimestamp="2025-09-30 10:07:13 +0000 UTC" firstStartedPulling="2025-09-30 10:07:15.016752026 +0000 UTC m=+1079.350012029" lastFinishedPulling="2025-09-30 10:07:21.815008362 +0000 UTC m=+1086.148268365" observedRunningTime="2025-09-30 10:07:22.316131408 +0000 UTC m=+1086.649391401" watchObservedRunningTime="2025-09-30 10:07:22.330562556 +0000 UTC m=+1086.663822549" Sep 30 10:07:22 crc kubenswrapper[4730]: I0930 10:07:22.342089 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-daf3-account-create-5rdlr"] Sep 30 10:07:22 crc kubenswrapper[4730]: I0930 10:07:22.353039 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f947945d-0a37-4104-95b5-d3437cd60556-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f947945d-0a37-4104-95b5-d3437cd60556" (UID: "f947945d-0a37-4104-95b5-d3437cd60556"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:07:22 crc kubenswrapper[4730]: I0930 10:07:22.353901 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f947945d-0a37-4104-95b5-d3437cd60556-config-data" (OuterVolumeSpecName: "config-data") pod "f947945d-0a37-4104-95b5-d3437cd60556" (UID: "f947945d-0a37-4104-95b5-d3437cd60556"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:07:22 crc kubenswrapper[4730]: I0930 10:07:22.383193 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-45tk8\" (UniqueName: \"kubernetes.io/projected/f947945d-0a37-4104-95b5-d3437cd60556-kube-api-access-45tk8\") on node \"crc\" DevicePath \"\"" Sep 30 10:07:22 crc kubenswrapper[4730]: I0930 10:07:22.383238 4730 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/f947945d-0a37-4104-95b5-d3437cd60556-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 10:07:22 crc kubenswrapper[4730]: I0930 10:07:22.383251 4730 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f947945d-0a37-4104-95b5-d3437cd60556-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 10:07:22 crc kubenswrapper[4730]: I0930 10:07:22.383261 4730 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f947945d-0a37-4104-95b5-d3437cd60556-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 10:07:22 crc kubenswrapper[4730]: I0930 10:07:22.525968 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/watcher-api-0"] Sep 30 10:07:22 crc kubenswrapper[4730]: E0930 10:07:22.526321 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4bee9a35-25c0-44b5-8160-a2787eeea901" containerName="mariadb-account-create" Sep 30 10:07:22 crc kubenswrapper[4730]: I0930 10:07:22.526337 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="4bee9a35-25c0-44b5-8160-a2787eeea901" containerName="mariadb-account-create" Sep 30 10:07:22 crc kubenswrapper[4730]: E0930 10:07:22.526355 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f947945d-0a37-4104-95b5-d3437cd60556" containerName="watcher-db-sync" Sep 30 10:07:22 crc kubenswrapper[4730]: I0930 10:07:22.526363 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="f947945d-0a37-4104-95b5-d3437cd60556" containerName="watcher-db-sync" Sep 30 10:07:22 crc kubenswrapper[4730]: I0930 10:07:22.526573 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="f947945d-0a37-4104-95b5-d3437cd60556" containerName="watcher-db-sync" Sep 30 10:07:22 crc kubenswrapper[4730]: I0930 10:07:22.526598 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="4bee9a35-25c0-44b5-8160-a2787eeea901" containerName="mariadb-account-create" Sep 30 10:07:22 crc kubenswrapper[4730]: I0930 10:07:22.568013 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-api-0" Sep 30 10:07:22 crc kubenswrapper[4730]: I0930 10:07:22.577285 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"watcher-api-config-data" Sep 30 10:07:22 crc kubenswrapper[4730]: I0930 10:07:22.588724 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-api-0"] Sep 30 10:07:22 crc kubenswrapper[4730]: I0930 10:07:22.644007 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/watcher-decision-engine-0"] Sep 30 10:07:22 crc kubenswrapper[4730]: I0930 10:07:22.645316 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/watcher-decision-engine-0" Sep 30 10:07:22 crc kubenswrapper[4730]: I0930 10:07:22.652328 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"watcher-decision-engine-config-data" Sep 30 10:07:22 crc kubenswrapper[4730]: I0930 10:07:22.657733 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/watcher-applier-0"] Sep 30 10:07:22 crc kubenswrapper[4730]: I0930 10:07:22.658822 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-applier-0" Sep 30 10:07:22 crc kubenswrapper[4730]: I0930 10:07:22.660594 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"watcher-applier-config-data" Sep 30 10:07:22 crc kubenswrapper[4730]: I0930 10:07:22.691557 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-applier-0"] Sep 30 10:07:22 crc kubenswrapper[4730]: I0930 10:07:22.698183 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/87b74547-1e89-46b3-82b2-64592d256309-config-data\") pod \"watcher-api-0\" (UID: \"87b74547-1e89-46b3-82b2-64592d256309\") " pod="openstack/watcher-api-0" Sep 30 10:07:22 crc kubenswrapper[4730]: I0930 10:07:22.698249 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/87b74547-1e89-46b3-82b2-64592d256309-combined-ca-bundle\") pod \"watcher-api-0\" (UID: \"87b74547-1e89-46b3-82b2-64592d256309\") " pod="openstack/watcher-api-0" Sep 30 10:07:22 crc kubenswrapper[4730]: I0930 10:07:22.698279 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-687n4\" (UniqueName: \"kubernetes.io/projected/87b74547-1e89-46b3-82b2-64592d256309-kube-api-access-687n4\") pod \"watcher-api-0\" (UID: \"87b74547-1e89-46b3-82b2-64592d256309\") " pod="openstack/watcher-api-0" Sep 30 10:07:22 crc kubenswrapper[4730]: I0930 10:07:22.698435 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/87b74547-1e89-46b3-82b2-64592d256309-custom-prometheus-ca\") pod \"watcher-api-0\" (UID: \"87b74547-1e89-46b3-82b2-64592d256309\") " pod="openstack/watcher-api-0" Sep 30 10:07:22 crc kubenswrapper[4730]: I0930 10:07:22.698460 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/87b74547-1e89-46b3-82b2-64592d256309-logs\") pod \"watcher-api-0\" (UID: \"87b74547-1e89-46b3-82b2-64592d256309\") " pod="openstack/watcher-api-0" Sep 30 10:07:22 crc kubenswrapper[4730]: I0930 10:07:22.713914 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-decision-engine-0"] Sep 30 10:07:22 crc kubenswrapper[4730]: I0930 10:07:22.799559 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/db8ecf95-5b86-4775-85ee-a3da046e9dba-logs\") pod \"watcher-applier-0\" (UID: \"db8ecf95-5b86-4775-85ee-a3da046e9dba\") " pod="openstack/watcher-applier-0" Sep 30 10:07:22 crc kubenswrapper[4730]: I0930 10:07:22.799595 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/db8ecf95-5b86-4775-85ee-a3da046e9dba-combined-ca-bundle\") pod \"watcher-applier-0\" (UID: \"db8ecf95-5b86-4775-85ee-a3da046e9dba\") " pod="openstack/watcher-applier-0" Sep 30 10:07:22 crc kubenswrapper[4730]: I0930 10:07:22.799667 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a4f9bd21-5f86-4443-87be-eadb5d1c77f9-combined-ca-bundle\") pod \"watcher-decision-engine-0\" (UID: \"a4f9bd21-5f86-4443-87be-eadb5d1c77f9\") " pod="openstack/watcher-decision-engine-0" Sep 30 10:07:22 crc kubenswrapper[4730]: I0930 10:07:22.799691 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a4f9bd21-5f86-4443-87be-eadb5d1c77f9-logs\") pod \"watcher-decision-engine-0\" (UID: \"a4f9bd21-5f86-4443-87be-eadb5d1c77f9\") " pod="openstack/watcher-decision-engine-0" Sep 30 10:07:22 crc kubenswrapper[4730]: I0930 10:07:22.799727 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/87b74547-1e89-46b3-82b2-64592d256309-config-data\") pod \"watcher-api-0\" (UID: \"87b74547-1e89-46b3-82b2-64592d256309\") " pod="openstack/watcher-api-0" Sep 30 10:07:22 crc kubenswrapper[4730]: I0930 10:07:22.799774 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x8xms\" (UniqueName: \"kubernetes.io/projected/a4f9bd21-5f86-4443-87be-eadb5d1c77f9-kube-api-access-x8xms\") pod \"watcher-decision-engine-0\" (UID: \"a4f9bd21-5f86-4443-87be-eadb5d1c77f9\") " pod="openstack/watcher-decision-engine-0" Sep 30 10:07:22 crc kubenswrapper[4730]: I0930 10:07:22.799796 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/db8ecf95-5b86-4775-85ee-a3da046e9dba-config-data\") pod \"watcher-applier-0\" (UID: \"db8ecf95-5b86-4775-85ee-a3da046e9dba\") " pod="openstack/watcher-applier-0" Sep 30 10:07:22 crc kubenswrapper[4730]: I0930 10:07:22.799816 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/a4f9bd21-5f86-4443-87be-eadb5d1c77f9-custom-prometheus-ca\") pod \"watcher-decision-engine-0\" (UID: \"a4f9bd21-5f86-4443-87be-eadb5d1c77f9\") " pod="openstack/watcher-decision-engine-0" Sep 30 10:07:22 crc kubenswrapper[4730]: I0930 10:07:22.799846 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/87b74547-1e89-46b3-82b2-64592d256309-combined-ca-bundle\") pod \"watcher-api-0\" (UID: \"87b74547-1e89-46b3-82b2-64592d256309\") " pod="openstack/watcher-api-0" Sep 30 10:07:22 crc kubenswrapper[4730]: I0930 10:07:22.799884 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-687n4\" (UniqueName: \"kubernetes.io/projected/87b74547-1e89-46b3-82b2-64592d256309-kube-api-access-687n4\") pod \"watcher-api-0\" (UID: \"87b74547-1e89-46b3-82b2-64592d256309\") " pod="openstack/watcher-api-0" Sep 30 10:07:22 crc kubenswrapper[4730]: I0930 10:07:22.799928 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/87b74547-1e89-46b3-82b2-64592d256309-custom-prometheus-ca\") pod 
\"watcher-api-0\" (UID: \"87b74547-1e89-46b3-82b2-64592d256309\") " pod="openstack/watcher-api-0" Sep 30 10:07:22 crc kubenswrapper[4730]: I0930 10:07:22.799952 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a4f9bd21-5f86-4443-87be-eadb5d1c77f9-config-data\") pod \"watcher-decision-engine-0\" (UID: \"a4f9bd21-5f86-4443-87be-eadb5d1c77f9\") " pod="openstack/watcher-decision-engine-0" Sep 30 10:07:22 crc kubenswrapper[4730]: I0930 10:07:22.799979 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/87b74547-1e89-46b3-82b2-64592d256309-logs\") pod \"watcher-api-0\" (UID: \"87b74547-1e89-46b3-82b2-64592d256309\") " pod="openstack/watcher-api-0" Sep 30 10:07:22 crc kubenswrapper[4730]: I0930 10:07:22.800000 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r2f5n\" (UniqueName: \"kubernetes.io/projected/db8ecf95-5b86-4775-85ee-a3da046e9dba-kube-api-access-r2f5n\") pod \"watcher-applier-0\" (UID: \"db8ecf95-5b86-4775-85ee-a3da046e9dba\") " pod="openstack/watcher-applier-0" Sep 30 10:07:22 crc kubenswrapper[4730]: I0930 10:07:22.802356 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/87b74547-1e89-46b3-82b2-64592d256309-logs\") pod \"watcher-api-0\" (UID: \"87b74547-1e89-46b3-82b2-64592d256309\") " pod="openstack/watcher-api-0" Sep 30 10:07:22 crc kubenswrapper[4730]: I0930 10:07:22.807244 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/87b74547-1e89-46b3-82b2-64592d256309-combined-ca-bundle\") pod \"watcher-api-0\" (UID: \"87b74547-1e89-46b3-82b2-64592d256309\") " pod="openstack/watcher-api-0" Sep 30 10:07:22 crc kubenswrapper[4730]: I0930 10:07:22.808203 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/87b74547-1e89-46b3-82b2-64592d256309-config-data\") pod \"watcher-api-0\" (UID: \"87b74547-1e89-46b3-82b2-64592d256309\") " pod="openstack/watcher-api-0" Sep 30 10:07:22 crc kubenswrapper[4730]: I0930 10:07:22.808644 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/87b74547-1e89-46b3-82b2-64592d256309-custom-prometheus-ca\") pod \"watcher-api-0\" (UID: \"87b74547-1e89-46b3-82b2-64592d256309\") " pod="openstack/watcher-api-0" Sep 30 10:07:22 crc kubenswrapper[4730]: I0930 10:07:22.818769 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-687n4\" (UniqueName: \"kubernetes.io/projected/87b74547-1e89-46b3-82b2-64592d256309-kube-api-access-687n4\") pod \"watcher-api-0\" (UID: \"87b74547-1e89-46b3-82b2-64592d256309\") " pod="openstack/watcher-api-0" Sep 30 10:07:22 crc kubenswrapper[4730]: I0930 10:07:22.900951 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a4f9bd21-5f86-4443-87be-eadb5d1c77f9-config-data\") pod \"watcher-decision-engine-0\" (UID: \"a4f9bd21-5f86-4443-87be-eadb5d1c77f9\") " pod="openstack/watcher-decision-engine-0" Sep 30 10:07:22 crc kubenswrapper[4730]: I0930 10:07:22.900989 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r2f5n\" (UniqueName: 
\"kubernetes.io/projected/db8ecf95-5b86-4775-85ee-a3da046e9dba-kube-api-access-r2f5n\") pod \"watcher-applier-0\" (UID: \"db8ecf95-5b86-4775-85ee-a3da046e9dba\") " pod="openstack/watcher-applier-0" Sep 30 10:07:22 crc kubenswrapper[4730]: I0930 10:07:22.901039 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/db8ecf95-5b86-4775-85ee-a3da046e9dba-logs\") pod \"watcher-applier-0\" (UID: \"db8ecf95-5b86-4775-85ee-a3da046e9dba\") " pod="openstack/watcher-applier-0" Sep 30 10:07:22 crc kubenswrapper[4730]: I0930 10:07:22.901058 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/db8ecf95-5b86-4775-85ee-a3da046e9dba-combined-ca-bundle\") pod \"watcher-applier-0\" (UID: \"db8ecf95-5b86-4775-85ee-a3da046e9dba\") " pod="openstack/watcher-applier-0" Sep 30 10:07:22 crc kubenswrapper[4730]: I0930 10:07:22.901099 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a4f9bd21-5f86-4443-87be-eadb5d1c77f9-combined-ca-bundle\") pod \"watcher-decision-engine-0\" (UID: \"a4f9bd21-5f86-4443-87be-eadb5d1c77f9\") " pod="openstack/watcher-decision-engine-0" Sep 30 10:07:22 crc kubenswrapper[4730]: I0930 10:07:22.901118 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a4f9bd21-5f86-4443-87be-eadb5d1c77f9-logs\") pod \"watcher-decision-engine-0\" (UID: \"a4f9bd21-5f86-4443-87be-eadb5d1c77f9\") " pod="openstack/watcher-decision-engine-0" Sep 30 10:07:22 crc kubenswrapper[4730]: I0930 10:07:22.901161 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x8xms\" (UniqueName: \"kubernetes.io/projected/a4f9bd21-5f86-4443-87be-eadb5d1c77f9-kube-api-access-x8xms\") pod \"watcher-decision-engine-0\" (UID: \"a4f9bd21-5f86-4443-87be-eadb5d1c77f9\") " pod="openstack/watcher-decision-engine-0" Sep 30 10:07:22 crc kubenswrapper[4730]: I0930 10:07:22.901177 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/db8ecf95-5b86-4775-85ee-a3da046e9dba-config-data\") pod \"watcher-applier-0\" (UID: \"db8ecf95-5b86-4775-85ee-a3da046e9dba\") " pod="openstack/watcher-applier-0" Sep 30 10:07:22 crc kubenswrapper[4730]: I0930 10:07:22.901192 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/a4f9bd21-5f86-4443-87be-eadb5d1c77f9-custom-prometheus-ca\") pod \"watcher-decision-engine-0\" (UID: \"a4f9bd21-5f86-4443-87be-eadb5d1c77f9\") " pod="openstack/watcher-decision-engine-0" Sep 30 10:07:22 crc kubenswrapper[4730]: I0930 10:07:22.902044 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/db8ecf95-5b86-4775-85ee-a3da046e9dba-logs\") pod \"watcher-applier-0\" (UID: \"db8ecf95-5b86-4775-85ee-a3da046e9dba\") " pod="openstack/watcher-applier-0" Sep 30 10:07:22 crc kubenswrapper[4730]: I0930 10:07:22.902074 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a4f9bd21-5f86-4443-87be-eadb5d1c77f9-logs\") pod \"watcher-decision-engine-0\" (UID: \"a4f9bd21-5f86-4443-87be-eadb5d1c77f9\") " pod="openstack/watcher-decision-engine-0" Sep 30 10:07:22 crc kubenswrapper[4730]: 
I0930 10:07:22.903129 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-lv4gb" Sep 30 10:07:22 crc kubenswrapper[4730]: I0930 10:07:22.904793 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/a4f9bd21-5f86-4443-87be-eadb5d1c77f9-custom-prometheus-ca\") pod \"watcher-decision-engine-0\" (UID: \"a4f9bd21-5f86-4443-87be-eadb5d1c77f9\") " pod="openstack/watcher-decision-engine-0" Sep 30 10:07:22 crc kubenswrapper[4730]: I0930 10:07:22.905635 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a4f9bd21-5f86-4443-87be-eadb5d1c77f9-combined-ca-bundle\") pod \"watcher-decision-engine-0\" (UID: \"a4f9bd21-5f86-4443-87be-eadb5d1c77f9\") " pod="openstack/watcher-decision-engine-0" Sep 30 10:07:22 crc kubenswrapper[4730]: I0930 10:07:22.911728 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/db8ecf95-5b86-4775-85ee-a3da046e9dba-config-data\") pod \"watcher-applier-0\" (UID: \"db8ecf95-5b86-4775-85ee-a3da046e9dba\") " pod="openstack/watcher-applier-0" Sep 30 10:07:22 crc kubenswrapper[4730]: I0930 10:07:22.911726 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/db8ecf95-5b86-4775-85ee-a3da046e9dba-combined-ca-bundle\") pod \"watcher-applier-0\" (UID: \"db8ecf95-5b86-4775-85ee-a3da046e9dba\") " pod="openstack/watcher-applier-0" Sep 30 10:07:22 crc kubenswrapper[4730]: I0930 10:07:22.914316 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a4f9bd21-5f86-4443-87be-eadb5d1c77f9-config-data\") pod \"watcher-decision-engine-0\" (UID: \"a4f9bd21-5f86-4443-87be-eadb5d1c77f9\") " pod="openstack/watcher-decision-engine-0" Sep 30 10:07:22 crc kubenswrapper[4730]: I0930 10:07:22.925239 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x8xms\" (UniqueName: \"kubernetes.io/projected/a4f9bd21-5f86-4443-87be-eadb5d1c77f9-kube-api-access-x8xms\") pod \"watcher-decision-engine-0\" (UID: \"a4f9bd21-5f86-4443-87be-eadb5d1c77f9\") " pod="openstack/watcher-decision-engine-0" Sep 30 10:07:22 crc kubenswrapper[4730]: I0930 10:07:22.925786 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-api-0" Sep 30 10:07:22 crc kubenswrapper[4730]: I0930 10:07:22.927155 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r2f5n\" (UniqueName: \"kubernetes.io/projected/db8ecf95-5b86-4775-85ee-a3da046e9dba-kube-api-access-r2f5n\") pod \"watcher-applier-0\" (UID: \"db8ecf95-5b86-4775-85ee-a3da046e9dba\") " pod="openstack/watcher-applier-0" Sep 30 10:07:22 crc kubenswrapper[4730]: I0930 10:07:22.992527 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/watcher-decision-engine-0" Sep 30 10:07:23 crc kubenswrapper[4730]: I0930 10:07:23.002175 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1b9e2246-a00f-4a7f-8cbb-57a5f11d6605-combined-ca-bundle\") pod \"1b9e2246-a00f-4a7f-8cbb-57a5f11d6605\" (UID: \"1b9e2246-a00f-4a7f-8cbb-57a5f11d6605\") " Sep 30 10:07:23 crc kubenswrapper[4730]: I0930 10:07:23.002266 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/1b9e2246-a00f-4a7f-8cbb-57a5f11d6605-fernet-keys\") pod \"1b9e2246-a00f-4a7f-8cbb-57a5f11d6605\" (UID: \"1b9e2246-a00f-4a7f-8cbb-57a5f11d6605\") " Sep 30 10:07:23 crc kubenswrapper[4730]: I0930 10:07:23.002295 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j7qc7\" (UniqueName: \"kubernetes.io/projected/1b9e2246-a00f-4a7f-8cbb-57a5f11d6605-kube-api-access-j7qc7\") pod \"1b9e2246-a00f-4a7f-8cbb-57a5f11d6605\" (UID: \"1b9e2246-a00f-4a7f-8cbb-57a5f11d6605\") " Sep 30 10:07:23 crc kubenswrapper[4730]: I0930 10:07:23.002441 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1b9e2246-a00f-4a7f-8cbb-57a5f11d6605-scripts\") pod \"1b9e2246-a00f-4a7f-8cbb-57a5f11d6605\" (UID: \"1b9e2246-a00f-4a7f-8cbb-57a5f11d6605\") " Sep 30 10:07:23 crc kubenswrapper[4730]: I0930 10:07:23.002507 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/1b9e2246-a00f-4a7f-8cbb-57a5f11d6605-credential-keys\") pod \"1b9e2246-a00f-4a7f-8cbb-57a5f11d6605\" (UID: \"1b9e2246-a00f-4a7f-8cbb-57a5f11d6605\") " Sep 30 10:07:23 crc kubenswrapper[4730]: I0930 10:07:23.002555 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1b9e2246-a00f-4a7f-8cbb-57a5f11d6605-config-data\") pod \"1b9e2246-a00f-4a7f-8cbb-57a5f11d6605\" (UID: \"1b9e2246-a00f-4a7f-8cbb-57a5f11d6605\") " Sep 30 10:07:23 crc kubenswrapper[4730]: I0930 10:07:23.012051 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-applier-0" Sep 30 10:07:23 crc kubenswrapper[4730]: I0930 10:07:23.023224 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1b9e2246-a00f-4a7f-8cbb-57a5f11d6605-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "1b9e2246-a00f-4a7f-8cbb-57a5f11d6605" (UID: "1b9e2246-a00f-4a7f-8cbb-57a5f11d6605"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:07:23 crc kubenswrapper[4730]: I0930 10:07:23.023344 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1b9e2246-a00f-4a7f-8cbb-57a5f11d6605-kube-api-access-j7qc7" (OuterVolumeSpecName: "kube-api-access-j7qc7") pod "1b9e2246-a00f-4a7f-8cbb-57a5f11d6605" (UID: "1b9e2246-a00f-4a7f-8cbb-57a5f11d6605"). InnerVolumeSpecName "kube-api-access-j7qc7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:07:23 crc kubenswrapper[4730]: I0930 10:07:23.023815 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1b9e2246-a00f-4a7f-8cbb-57a5f11d6605-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "1b9e2246-a00f-4a7f-8cbb-57a5f11d6605" (UID: "1b9e2246-a00f-4a7f-8cbb-57a5f11d6605"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:07:23 crc kubenswrapper[4730]: I0930 10:07:23.026855 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1b9e2246-a00f-4a7f-8cbb-57a5f11d6605-scripts" (OuterVolumeSpecName: "scripts") pod "1b9e2246-a00f-4a7f-8cbb-57a5f11d6605" (UID: "1b9e2246-a00f-4a7f-8cbb-57a5f11d6605"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:07:23 crc kubenswrapper[4730]: I0930 10:07:23.038389 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1b9e2246-a00f-4a7f-8cbb-57a5f11d6605-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1b9e2246-a00f-4a7f-8cbb-57a5f11d6605" (UID: "1b9e2246-a00f-4a7f-8cbb-57a5f11d6605"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:07:23 crc kubenswrapper[4730]: I0930 10:07:23.047739 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1b9e2246-a00f-4a7f-8cbb-57a5f11d6605-config-data" (OuterVolumeSpecName: "config-data") pod "1b9e2246-a00f-4a7f-8cbb-57a5f11d6605" (UID: "1b9e2246-a00f-4a7f-8cbb-57a5f11d6605"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:07:23 crc kubenswrapper[4730]: I0930 10:07:23.104472 4730 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/1b9e2246-a00f-4a7f-8cbb-57a5f11d6605-fernet-keys\") on node \"crc\" DevicePath \"\"" Sep 30 10:07:23 crc kubenswrapper[4730]: I0930 10:07:23.104506 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j7qc7\" (UniqueName: \"kubernetes.io/projected/1b9e2246-a00f-4a7f-8cbb-57a5f11d6605-kube-api-access-j7qc7\") on node \"crc\" DevicePath \"\"" Sep 30 10:07:23 crc kubenswrapper[4730]: I0930 10:07:23.104515 4730 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1b9e2246-a00f-4a7f-8cbb-57a5f11d6605-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 10:07:23 crc kubenswrapper[4730]: I0930 10:07:23.104523 4730 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/1b9e2246-a00f-4a7f-8cbb-57a5f11d6605-credential-keys\") on node \"crc\" DevicePath \"\"" Sep 30 10:07:23 crc kubenswrapper[4730]: I0930 10:07:23.104532 4730 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1b9e2246-a00f-4a7f-8cbb-57a5f11d6605-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 10:07:23 crc kubenswrapper[4730]: I0930 10:07:23.104540 4730 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1b9e2246-a00f-4a7f-8cbb-57a5f11d6605-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 10:07:23 crc kubenswrapper[4730]: I0930 10:07:23.329223 4730 generic.go:334] "Generic (PLEG): container finished" podID="4ccc082d-d035-4fa0-9a6b-76fa8b89c055" 
containerID="9c9993ef51641126f6c41d89ea704dee0c15e26bcde6e19a8725ff9299e24232" exitCode=0 Sep 30 10:07:23 crc kubenswrapper[4730]: I0930 10:07:23.329378 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-daf3-account-create-5rdlr" event={"ID":"4ccc082d-d035-4fa0-9a6b-76fa8b89c055","Type":"ContainerDied","Data":"9c9993ef51641126f6c41d89ea704dee0c15e26bcde6e19a8725ff9299e24232"} Sep 30 10:07:23 crc kubenswrapper[4730]: I0930 10:07:23.329434 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-daf3-account-create-5rdlr" event={"ID":"4ccc082d-d035-4fa0-9a6b-76fa8b89c055","Type":"ContainerStarted","Data":"ea74abf9d6bf78e9b84cc2b3020db702c20fcefb5391fae2b17363cdfc31c6cb"} Sep 30 10:07:23 crc kubenswrapper[4730]: I0930 10:07:23.378270 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-lv4gb" Sep 30 10:07:23 crc kubenswrapper[4730]: I0930 10:07:23.378753 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-lv4gb" event={"ID":"1b9e2246-a00f-4a7f-8cbb-57a5f11d6605","Type":"ContainerDied","Data":"f648430fa500810133db8877898d4b7a58b26eb39ee5db42987432f45d8a4fa1"} Sep 30 10:07:23 crc kubenswrapper[4730]: I0930 10:07:23.378846 4730 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f648430fa500810133db8877898d4b7a58b26eb39ee5db42987432f45d8a4fa1" Sep 30 10:07:23 crc kubenswrapper[4730]: I0930 10:07:23.413674 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-lv4gb"] Sep 30 10:07:23 crc kubenswrapper[4730]: I0930 10:07:23.418010 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-lv4gb"] Sep 30 10:07:23 crc kubenswrapper[4730]: I0930 10:07:23.436686 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-api-0"] Sep 30 10:07:23 crc kubenswrapper[4730]: I0930 10:07:23.475775 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-5qwrj"] Sep 30 10:07:23 crc kubenswrapper[4730]: E0930 10:07:23.476212 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1b9e2246-a00f-4a7f-8cbb-57a5f11d6605" containerName="keystone-bootstrap" Sep 30 10:07:23 crc kubenswrapper[4730]: I0930 10:07:23.476229 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="1b9e2246-a00f-4a7f-8cbb-57a5f11d6605" containerName="keystone-bootstrap" Sep 30 10:07:23 crc kubenswrapper[4730]: I0930 10:07:23.476430 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="1b9e2246-a00f-4a7f-8cbb-57a5f11d6605" containerName="keystone-bootstrap" Sep 30 10:07:23 crc kubenswrapper[4730]: I0930 10:07:23.477227 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-5qwrj" Sep 30 10:07:23 crc kubenswrapper[4730]: I0930 10:07:23.480657 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-wg7cm" Sep 30 10:07:23 crc kubenswrapper[4730]: I0930 10:07:23.481595 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Sep 30 10:07:23 crc kubenswrapper[4730]: I0930 10:07:23.482202 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Sep 30 10:07:23 crc kubenswrapper[4730]: I0930 10:07:23.482393 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Sep 30 10:07:23 crc kubenswrapper[4730]: I0930 10:07:23.490153 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-5qwrj"] Sep 30 10:07:23 crc kubenswrapper[4730]: I0930 10:07:23.556850 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-applier-0"] Sep 30 10:07:23 crc kubenswrapper[4730]: I0930 10:07:23.565923 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-decision-engine-0"] Sep 30 10:07:23 crc kubenswrapper[4730]: I0930 10:07:23.621383 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d1b317d0-1ec7-4bb5-9218-e853eaecbe43-config-data\") pod \"keystone-bootstrap-5qwrj\" (UID: \"d1b317d0-1ec7-4bb5-9218-e853eaecbe43\") " pod="openstack/keystone-bootstrap-5qwrj" Sep 30 10:07:23 crc kubenswrapper[4730]: I0930 10:07:23.621435 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d1b317d0-1ec7-4bb5-9218-e853eaecbe43-fernet-keys\") pod \"keystone-bootstrap-5qwrj\" (UID: \"d1b317d0-1ec7-4bb5-9218-e853eaecbe43\") " pod="openstack/keystone-bootstrap-5qwrj" Sep 30 10:07:23 crc kubenswrapper[4730]: I0930 10:07:23.621516 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d1b317d0-1ec7-4bb5-9218-e853eaecbe43-scripts\") pod \"keystone-bootstrap-5qwrj\" (UID: \"d1b317d0-1ec7-4bb5-9218-e853eaecbe43\") " pod="openstack/keystone-bootstrap-5qwrj" Sep 30 10:07:23 crc kubenswrapper[4730]: I0930 10:07:23.621535 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4vw6j\" (UniqueName: \"kubernetes.io/projected/d1b317d0-1ec7-4bb5-9218-e853eaecbe43-kube-api-access-4vw6j\") pod \"keystone-bootstrap-5qwrj\" (UID: \"d1b317d0-1ec7-4bb5-9218-e853eaecbe43\") " pod="openstack/keystone-bootstrap-5qwrj" Sep 30 10:07:23 crc kubenswrapper[4730]: I0930 10:07:23.621688 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/d1b317d0-1ec7-4bb5-9218-e853eaecbe43-credential-keys\") pod \"keystone-bootstrap-5qwrj\" (UID: \"d1b317d0-1ec7-4bb5-9218-e853eaecbe43\") " pod="openstack/keystone-bootstrap-5qwrj" Sep 30 10:07:23 crc kubenswrapper[4730]: I0930 10:07:23.621886 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d1b317d0-1ec7-4bb5-9218-e853eaecbe43-combined-ca-bundle\") pod \"keystone-bootstrap-5qwrj\" (UID: \"d1b317d0-1ec7-4bb5-9218-e853eaecbe43\") " 
pod="openstack/keystone-bootstrap-5qwrj" Sep 30 10:07:23 crc kubenswrapper[4730]: I0930 10:07:23.723187 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d1b317d0-1ec7-4bb5-9218-e853eaecbe43-scripts\") pod \"keystone-bootstrap-5qwrj\" (UID: \"d1b317d0-1ec7-4bb5-9218-e853eaecbe43\") " pod="openstack/keystone-bootstrap-5qwrj" Sep 30 10:07:23 crc kubenswrapper[4730]: I0930 10:07:23.723248 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4vw6j\" (UniqueName: \"kubernetes.io/projected/d1b317d0-1ec7-4bb5-9218-e853eaecbe43-kube-api-access-4vw6j\") pod \"keystone-bootstrap-5qwrj\" (UID: \"d1b317d0-1ec7-4bb5-9218-e853eaecbe43\") " pod="openstack/keystone-bootstrap-5qwrj" Sep 30 10:07:23 crc kubenswrapper[4730]: I0930 10:07:23.723294 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/d1b317d0-1ec7-4bb5-9218-e853eaecbe43-credential-keys\") pod \"keystone-bootstrap-5qwrj\" (UID: \"d1b317d0-1ec7-4bb5-9218-e853eaecbe43\") " pod="openstack/keystone-bootstrap-5qwrj" Sep 30 10:07:23 crc kubenswrapper[4730]: I0930 10:07:23.723375 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d1b317d0-1ec7-4bb5-9218-e853eaecbe43-combined-ca-bundle\") pod \"keystone-bootstrap-5qwrj\" (UID: \"d1b317d0-1ec7-4bb5-9218-e853eaecbe43\") " pod="openstack/keystone-bootstrap-5qwrj" Sep 30 10:07:23 crc kubenswrapper[4730]: I0930 10:07:23.723475 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d1b317d0-1ec7-4bb5-9218-e853eaecbe43-config-data\") pod \"keystone-bootstrap-5qwrj\" (UID: \"d1b317d0-1ec7-4bb5-9218-e853eaecbe43\") " pod="openstack/keystone-bootstrap-5qwrj" Sep 30 10:07:23 crc kubenswrapper[4730]: I0930 10:07:23.723501 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d1b317d0-1ec7-4bb5-9218-e853eaecbe43-fernet-keys\") pod \"keystone-bootstrap-5qwrj\" (UID: \"d1b317d0-1ec7-4bb5-9218-e853eaecbe43\") " pod="openstack/keystone-bootstrap-5qwrj" Sep 30 10:07:23 crc kubenswrapper[4730]: I0930 10:07:23.731659 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d1b317d0-1ec7-4bb5-9218-e853eaecbe43-config-data\") pod \"keystone-bootstrap-5qwrj\" (UID: \"d1b317d0-1ec7-4bb5-9218-e853eaecbe43\") " pod="openstack/keystone-bootstrap-5qwrj" Sep 30 10:07:23 crc kubenswrapper[4730]: I0930 10:07:23.743846 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d1b317d0-1ec7-4bb5-9218-e853eaecbe43-fernet-keys\") pod \"keystone-bootstrap-5qwrj\" (UID: \"d1b317d0-1ec7-4bb5-9218-e853eaecbe43\") " pod="openstack/keystone-bootstrap-5qwrj" Sep 30 10:07:23 crc kubenswrapper[4730]: I0930 10:07:23.744359 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d1b317d0-1ec7-4bb5-9218-e853eaecbe43-combined-ca-bundle\") pod \"keystone-bootstrap-5qwrj\" (UID: \"d1b317d0-1ec7-4bb5-9218-e853eaecbe43\") " pod="openstack/keystone-bootstrap-5qwrj" Sep 30 10:07:23 crc kubenswrapper[4730]: I0930 10:07:23.746032 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"credential-keys\" (UniqueName: \"kubernetes.io/secret/d1b317d0-1ec7-4bb5-9218-e853eaecbe43-credential-keys\") pod \"keystone-bootstrap-5qwrj\" (UID: \"d1b317d0-1ec7-4bb5-9218-e853eaecbe43\") " pod="openstack/keystone-bootstrap-5qwrj" Sep 30 10:07:23 crc kubenswrapper[4730]: I0930 10:07:23.755433 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d1b317d0-1ec7-4bb5-9218-e853eaecbe43-scripts\") pod \"keystone-bootstrap-5qwrj\" (UID: \"d1b317d0-1ec7-4bb5-9218-e853eaecbe43\") " pod="openstack/keystone-bootstrap-5qwrj" Sep 30 10:07:23 crc kubenswrapper[4730]: I0930 10:07:23.769560 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4vw6j\" (UniqueName: \"kubernetes.io/projected/d1b317d0-1ec7-4bb5-9218-e853eaecbe43-kube-api-access-4vw6j\") pod \"keystone-bootstrap-5qwrj\" (UID: \"d1b317d0-1ec7-4bb5-9218-e853eaecbe43\") " pod="openstack/keystone-bootstrap-5qwrj" Sep 30 10:07:23 crc kubenswrapper[4730]: I0930 10:07:23.805005 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-5qwrj" Sep 30 10:07:24 crc kubenswrapper[4730]: I0930 10:07:24.294212 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-7d888b67c9-cpbvx" Sep 30 10:07:24 crc kubenswrapper[4730]: I0930 10:07:24.407518 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1b9e2246-a00f-4a7f-8cbb-57a5f11d6605" path="/var/lib/kubelet/pods/1b9e2246-a00f-4a7f-8cbb-57a5f11d6605/volumes" Sep 30 10:07:24 crc kubenswrapper[4730]: I0930 10:07:24.410106 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-d87b9d675-rjngv"] Sep 30 10:07:24 crc kubenswrapper[4730]: I0930 10:07:24.410762 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-d87b9d675-rjngv" podUID="75687951-20a4-47c3-a5f2-397cdb1953f4" containerName="dnsmasq-dns" containerID="cri-o://b360036e53fc39b990d51c018a8ef23f2941b9c1c08a23aef095462863feafd2" gracePeriod=10 Sep 30 10:07:24 crc kubenswrapper[4730]: I0930 10:07:24.422789 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1acf6558-7e16-4d00-b19f-c757db81dc58","Type":"ContainerStarted","Data":"04331fc8abaa27fb293dc73e41c6257c32f9236443a99c36e822aa0f750cbd51"} Sep 30 10:07:24 crc kubenswrapper[4730]: I0930 10:07:24.435846 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-decision-engine-0" event={"ID":"a4f9bd21-5f86-4443-87be-eadb5d1c77f9","Type":"ContainerStarted","Data":"c12bb87babaac0d5dfc712c7fb4c00330860ca8f9140a6a3728ef47a0b8d5b52"} Sep 30 10:07:24 crc kubenswrapper[4730]: I0930 10:07:24.440760 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-applier-0" event={"ID":"db8ecf95-5b86-4775-85ee-a3da046e9dba","Type":"ContainerStarted","Data":"84d87cf1200be7af74451f42b043c45b87aa91bf87445bbd7aa337b9d785d0b7"} Sep 30 10:07:24 crc kubenswrapper[4730]: I0930 10:07:24.444333 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-api-0" event={"ID":"87b74547-1e89-46b3-82b2-64592d256309","Type":"ContainerStarted","Data":"e09c064a4008a0cf1eb83b0ca3fc5ba062c21a679935086f11174c671e85cbe9"} Sep 30 10:07:24 crc kubenswrapper[4730]: I0930 10:07:24.444375 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-api-0" 
event={"ID":"87b74547-1e89-46b3-82b2-64592d256309","Type":"ContainerStarted","Data":"d0176f644173c3853e811e5dd424a2c014535e3f4bf4ac48e93eb87fd9755aa9"} Sep 30 10:07:24 crc kubenswrapper[4730]: I0930 10:07:24.580787 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-5qwrj"] Sep 30 10:07:24 crc kubenswrapper[4730]: I0930 10:07:24.685811 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-sync-wrvww"] Sep 30 10:07:24 crc kubenswrapper[4730]: I0930 10:07:24.693072 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-wrvww" Sep 30 10:07:24 crc kubenswrapper[4730]: I0930 10:07:24.696299 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-w988v" Sep 30 10:07:24 crc kubenswrapper[4730]: I0930 10:07:24.697137 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Sep 30 10:07:24 crc kubenswrapper[4730]: I0930 10:07:24.698187 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Sep 30 10:07:24 crc kubenswrapper[4730]: I0930 10:07:24.702420 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-wrvww"] Sep 30 10:07:24 crc kubenswrapper[4730]: I0930 10:07:24.856155 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w9jsl\" (UniqueName: \"kubernetes.io/projected/488816e9-d4e6-4956-9671-c9de4118821c-kube-api-access-w9jsl\") pod \"cinder-db-sync-wrvww\" (UID: \"488816e9-d4e6-4956-9671-c9de4118821c\") " pod="openstack/cinder-db-sync-wrvww" Sep 30 10:07:24 crc kubenswrapper[4730]: I0930 10:07:24.856211 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/488816e9-d4e6-4956-9671-c9de4118821c-db-sync-config-data\") pod \"cinder-db-sync-wrvww\" (UID: \"488816e9-d4e6-4956-9671-c9de4118821c\") " pod="openstack/cinder-db-sync-wrvww" Sep 30 10:07:24 crc kubenswrapper[4730]: I0930 10:07:24.856247 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/488816e9-d4e6-4956-9671-c9de4118821c-config-data\") pod \"cinder-db-sync-wrvww\" (UID: \"488816e9-d4e6-4956-9671-c9de4118821c\") " pod="openstack/cinder-db-sync-wrvww" Sep 30 10:07:24 crc kubenswrapper[4730]: I0930 10:07:24.856276 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/488816e9-d4e6-4956-9671-c9de4118821c-combined-ca-bundle\") pod \"cinder-db-sync-wrvww\" (UID: \"488816e9-d4e6-4956-9671-c9de4118821c\") " pod="openstack/cinder-db-sync-wrvww" Sep 30 10:07:24 crc kubenswrapper[4730]: I0930 10:07:24.856385 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/488816e9-d4e6-4956-9671-c9de4118821c-scripts\") pod \"cinder-db-sync-wrvww\" (UID: \"488816e9-d4e6-4956-9671-c9de4118821c\") " pod="openstack/cinder-db-sync-wrvww" Sep 30 10:07:24 crc kubenswrapper[4730]: I0930 10:07:24.856404 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/488816e9-d4e6-4956-9671-c9de4118821c-etc-machine-id\") pod 
\"cinder-db-sync-wrvww\" (UID: \"488816e9-d4e6-4956-9671-c9de4118821c\") " pod="openstack/cinder-db-sync-wrvww" Sep 30 10:07:24 crc kubenswrapper[4730]: I0930 10:07:24.883479 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-daf3-account-create-5rdlr" Sep 30 10:07:24 crc kubenswrapper[4730]: I0930 10:07:24.958211 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/488816e9-d4e6-4956-9671-c9de4118821c-scripts\") pod \"cinder-db-sync-wrvww\" (UID: \"488816e9-d4e6-4956-9671-c9de4118821c\") " pod="openstack/cinder-db-sync-wrvww" Sep 30 10:07:24 crc kubenswrapper[4730]: I0930 10:07:24.958276 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/488816e9-d4e6-4956-9671-c9de4118821c-etc-machine-id\") pod \"cinder-db-sync-wrvww\" (UID: \"488816e9-d4e6-4956-9671-c9de4118821c\") " pod="openstack/cinder-db-sync-wrvww" Sep 30 10:07:24 crc kubenswrapper[4730]: I0930 10:07:24.958332 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w9jsl\" (UniqueName: \"kubernetes.io/projected/488816e9-d4e6-4956-9671-c9de4118821c-kube-api-access-w9jsl\") pod \"cinder-db-sync-wrvww\" (UID: \"488816e9-d4e6-4956-9671-c9de4118821c\") " pod="openstack/cinder-db-sync-wrvww" Sep 30 10:07:24 crc kubenswrapper[4730]: I0930 10:07:24.958369 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/488816e9-d4e6-4956-9671-c9de4118821c-db-sync-config-data\") pod \"cinder-db-sync-wrvww\" (UID: \"488816e9-d4e6-4956-9671-c9de4118821c\") " pod="openstack/cinder-db-sync-wrvww" Sep 30 10:07:24 crc kubenswrapper[4730]: I0930 10:07:24.958399 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/488816e9-d4e6-4956-9671-c9de4118821c-config-data\") pod \"cinder-db-sync-wrvww\" (UID: \"488816e9-d4e6-4956-9671-c9de4118821c\") " pod="openstack/cinder-db-sync-wrvww" Sep 30 10:07:24 crc kubenswrapper[4730]: I0930 10:07:24.958431 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/488816e9-d4e6-4956-9671-c9de4118821c-combined-ca-bundle\") pod \"cinder-db-sync-wrvww\" (UID: \"488816e9-d4e6-4956-9671-c9de4118821c\") " pod="openstack/cinder-db-sync-wrvww" Sep 30 10:07:24 crc kubenswrapper[4730]: I0930 10:07:24.959104 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/488816e9-d4e6-4956-9671-c9de4118821c-etc-machine-id\") pod \"cinder-db-sync-wrvww\" (UID: \"488816e9-d4e6-4956-9671-c9de4118821c\") " pod="openstack/cinder-db-sync-wrvww" Sep 30 10:07:24 crc kubenswrapper[4730]: I0930 10:07:24.967215 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/488816e9-d4e6-4956-9671-c9de4118821c-combined-ca-bundle\") pod \"cinder-db-sync-wrvww\" (UID: \"488816e9-d4e6-4956-9671-c9de4118821c\") " pod="openstack/cinder-db-sync-wrvww" Sep 30 10:07:24 crc kubenswrapper[4730]: I0930 10:07:24.973778 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/488816e9-d4e6-4956-9671-c9de4118821c-scripts\") pod \"cinder-db-sync-wrvww\" (UID: 
\"488816e9-d4e6-4956-9671-c9de4118821c\") " pod="openstack/cinder-db-sync-wrvww" Sep 30 10:07:24 crc kubenswrapper[4730]: I0930 10:07:24.982845 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/488816e9-d4e6-4956-9671-c9de4118821c-config-data\") pod \"cinder-db-sync-wrvww\" (UID: \"488816e9-d4e6-4956-9671-c9de4118821c\") " pod="openstack/cinder-db-sync-wrvww" Sep 30 10:07:24 crc kubenswrapper[4730]: I0930 10:07:24.985218 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w9jsl\" (UniqueName: \"kubernetes.io/projected/488816e9-d4e6-4956-9671-c9de4118821c-kube-api-access-w9jsl\") pod \"cinder-db-sync-wrvww\" (UID: \"488816e9-d4e6-4956-9671-c9de4118821c\") " pod="openstack/cinder-db-sync-wrvww" Sep 30 10:07:25 crc kubenswrapper[4730]: I0930 10:07:25.006216 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/488816e9-d4e6-4956-9671-c9de4118821c-db-sync-config-data\") pod \"cinder-db-sync-wrvww\" (UID: \"488816e9-d4e6-4956-9671-c9de4118821c\") " pod="openstack/cinder-db-sync-wrvww" Sep 30 10:07:25 crc kubenswrapper[4730]: I0930 10:07:25.059340 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gblqs\" (UniqueName: \"kubernetes.io/projected/4ccc082d-d035-4fa0-9a6b-76fa8b89c055-kube-api-access-gblqs\") pod \"4ccc082d-d035-4fa0-9a6b-76fa8b89c055\" (UID: \"4ccc082d-d035-4fa0-9a6b-76fa8b89c055\") " Sep 30 10:07:25 crc kubenswrapper[4730]: I0930 10:07:25.070270 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4ccc082d-d035-4fa0-9a6b-76fa8b89c055-kube-api-access-gblqs" (OuterVolumeSpecName: "kube-api-access-gblqs") pod "4ccc082d-d035-4fa0-9a6b-76fa8b89c055" (UID: "4ccc082d-d035-4fa0-9a6b-76fa8b89c055"). InnerVolumeSpecName "kube-api-access-gblqs". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:07:25 crc kubenswrapper[4730]: I0930 10:07:25.158413 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-wrvww" Sep 30 10:07:25 crc kubenswrapper[4730]: I0930 10:07:25.161183 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gblqs\" (UniqueName: \"kubernetes.io/projected/4ccc082d-d035-4fa0-9a6b-76fa8b89c055-kube-api-access-gblqs\") on node \"crc\" DevicePath \"\"" Sep 30 10:07:25 crc kubenswrapper[4730]: I0930 10:07:25.459645 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-5qwrj" event={"ID":"d1b317d0-1ec7-4bb5-9218-e853eaecbe43","Type":"ContainerStarted","Data":"98307790e39834692f9e054993bc684b2f8357a2a577bbc1719af4a10fca0927"} Sep 30 10:07:25 crc kubenswrapper[4730]: I0930 10:07:25.459925 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-5qwrj" event={"ID":"d1b317d0-1ec7-4bb5-9218-e853eaecbe43","Type":"ContainerStarted","Data":"3f5add9369f1e293ecc38048d21d6232f6a1a68b2f724613ccadf41bfe58c32f"} Sep 30 10:07:25 crc kubenswrapper[4730]: I0930 10:07:25.462702 4730 generic.go:334] "Generic (PLEG): container finished" podID="75687951-20a4-47c3-a5f2-397cdb1953f4" containerID="b360036e53fc39b990d51c018a8ef23f2941b9c1c08a23aef095462863feafd2" exitCode=0 Sep 30 10:07:25 crc kubenswrapper[4730]: I0930 10:07:25.462763 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-d87b9d675-rjngv" event={"ID":"75687951-20a4-47c3-a5f2-397cdb1953f4","Type":"ContainerDied","Data":"b360036e53fc39b990d51c018a8ef23f2941b9c1c08a23aef095462863feafd2"} Sep 30 10:07:25 crc kubenswrapper[4730]: I0930 10:07:25.464671 4730 generic.go:334] "Generic (PLEG): container finished" podID="64e70143-f93b-4808-b388-4faaa7f8e51d" containerID="bb516522601ec0b341f35b611fbd192370924e388303d36a652911ab98f7ee13" exitCode=0 Sep 30 10:07:25 crc kubenswrapper[4730]: I0930 10:07:25.464753 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-gcmkp" event={"ID":"64e70143-f93b-4808-b388-4faaa7f8e51d","Type":"ContainerDied","Data":"bb516522601ec0b341f35b611fbd192370924e388303d36a652911ab98f7ee13"} Sep 30 10:07:25 crc kubenswrapper[4730]: I0930 10:07:25.469061 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-daf3-account-create-5rdlr" Sep 30 10:07:25 crc kubenswrapper[4730]: I0930 10:07:25.469064 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-daf3-account-create-5rdlr" event={"ID":"4ccc082d-d035-4fa0-9a6b-76fa8b89c055","Type":"ContainerDied","Data":"ea74abf9d6bf78e9b84cc2b3020db702c20fcefb5391fae2b17363cdfc31c6cb"} Sep 30 10:07:25 crc kubenswrapper[4730]: I0930 10:07:25.469185 4730 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ea74abf9d6bf78e9b84cc2b3020db702c20fcefb5391fae2b17363cdfc31c6cb" Sep 30 10:07:25 crc kubenswrapper[4730]: I0930 10:07:25.471582 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-api-0" event={"ID":"87b74547-1e89-46b3-82b2-64592d256309","Type":"ContainerStarted","Data":"5f2cdf60e0c77cf0533a2dc4212470e5cc80d0361f080fa81815b8d897137ed2"} Sep 30 10:07:25 crc kubenswrapper[4730]: I0930 10:07:25.471812 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/watcher-api-0" Sep 30 10:07:25 crc kubenswrapper[4730]: I0930 10:07:25.484415 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-5qwrj" podStartSLOduration=2.484397874 podStartE2EDuration="2.484397874s" podCreationTimestamp="2025-09-30 10:07:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 10:07:25.473368234 +0000 UTC m=+1089.806628227" watchObservedRunningTime="2025-09-30 10:07:25.484397874 +0000 UTC m=+1089.817657867" Sep 30 10:07:25 crc kubenswrapper[4730]: I0930 10:07:25.500507 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/watcher-api-0" podStartSLOduration=3.500481334 podStartE2EDuration="3.500481334s" podCreationTimestamp="2025-09-30 10:07:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 10:07:25.488765595 +0000 UTC m=+1089.822025598" watchObservedRunningTime="2025-09-30 10:07:25.500481334 +0000 UTC m=+1089.833741327" Sep 30 10:07:25 crc kubenswrapper[4730]: I0930 10:07:25.825224 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-d87b9d675-rjngv" Sep 30 10:07:25 crc kubenswrapper[4730]: I0930 10:07:25.981502 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/75687951-20a4-47c3-a5f2-397cdb1953f4-ovsdbserver-sb\") pod \"75687951-20a4-47c3-a5f2-397cdb1953f4\" (UID: \"75687951-20a4-47c3-a5f2-397cdb1953f4\") " Sep 30 10:07:25 crc kubenswrapper[4730]: I0930 10:07:25.981625 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/75687951-20a4-47c3-a5f2-397cdb1953f4-ovsdbserver-nb\") pod \"75687951-20a4-47c3-a5f2-397cdb1953f4\" (UID: \"75687951-20a4-47c3-a5f2-397cdb1953f4\") " Sep 30 10:07:25 crc kubenswrapper[4730]: I0930 10:07:25.981662 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tqcwm\" (UniqueName: \"kubernetes.io/projected/75687951-20a4-47c3-a5f2-397cdb1953f4-kube-api-access-tqcwm\") pod \"75687951-20a4-47c3-a5f2-397cdb1953f4\" (UID: \"75687951-20a4-47c3-a5f2-397cdb1953f4\") " Sep 30 10:07:25 crc kubenswrapper[4730]: I0930 10:07:25.981693 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/75687951-20a4-47c3-a5f2-397cdb1953f4-dns-svc\") pod \"75687951-20a4-47c3-a5f2-397cdb1953f4\" (UID: \"75687951-20a4-47c3-a5f2-397cdb1953f4\") " Sep 30 10:07:25 crc kubenswrapper[4730]: I0930 10:07:25.981827 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/75687951-20a4-47c3-a5f2-397cdb1953f4-config\") pod \"75687951-20a4-47c3-a5f2-397cdb1953f4\" (UID: \"75687951-20a4-47c3-a5f2-397cdb1953f4\") " Sep 30 10:07:25 crc kubenswrapper[4730]: I0930 10:07:25.986733 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/75687951-20a4-47c3-a5f2-397cdb1953f4-kube-api-access-tqcwm" (OuterVolumeSpecName: "kube-api-access-tqcwm") pod "75687951-20a4-47c3-a5f2-397cdb1953f4" (UID: "75687951-20a4-47c3-a5f2-397cdb1953f4"). InnerVolumeSpecName "kube-api-access-tqcwm". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:07:26 crc kubenswrapper[4730]: I0930 10:07:26.033048 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/75687951-20a4-47c3-a5f2-397cdb1953f4-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "75687951-20a4-47c3-a5f2-397cdb1953f4" (UID: "75687951-20a4-47c3-a5f2-397cdb1953f4"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 10:07:26 crc kubenswrapper[4730]: I0930 10:07:26.037317 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/75687951-20a4-47c3-a5f2-397cdb1953f4-config" (OuterVolumeSpecName: "config") pod "75687951-20a4-47c3-a5f2-397cdb1953f4" (UID: "75687951-20a4-47c3-a5f2-397cdb1953f4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 10:07:26 crc kubenswrapper[4730]: I0930 10:07:26.038167 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/75687951-20a4-47c3-a5f2-397cdb1953f4-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "75687951-20a4-47c3-a5f2-397cdb1953f4" (UID: "75687951-20a4-47c3-a5f2-397cdb1953f4"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 10:07:26 crc kubenswrapper[4730]: I0930 10:07:26.044175 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/75687951-20a4-47c3-a5f2-397cdb1953f4-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "75687951-20a4-47c3-a5f2-397cdb1953f4" (UID: "75687951-20a4-47c3-a5f2-397cdb1953f4"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 10:07:26 crc kubenswrapper[4730]: I0930 10:07:26.083998 4730 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/75687951-20a4-47c3-a5f2-397cdb1953f4-config\") on node \"crc\" DevicePath \"\"" Sep 30 10:07:26 crc kubenswrapper[4730]: I0930 10:07:26.084033 4730 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/75687951-20a4-47c3-a5f2-397cdb1953f4-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 30 10:07:26 crc kubenswrapper[4730]: I0930 10:07:26.084045 4730 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/75687951-20a4-47c3-a5f2-397cdb1953f4-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 30 10:07:26 crc kubenswrapper[4730]: I0930 10:07:26.084054 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tqcwm\" (UniqueName: \"kubernetes.io/projected/75687951-20a4-47c3-a5f2-397cdb1953f4-kube-api-access-tqcwm\") on node \"crc\" DevicePath \"\"" Sep 30 10:07:26 crc kubenswrapper[4730]: I0930 10:07:26.084062 4730 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/75687951-20a4-47c3-a5f2-397cdb1953f4-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 10:07:26 crc kubenswrapper[4730]: I0930 10:07:26.490236 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-d87b9d675-rjngv" event={"ID":"75687951-20a4-47c3-a5f2-397cdb1953f4","Type":"ContainerDied","Data":"1679914c6933b7b227bc597fe27dd055dc244bb91a01509dcd80f9c5d5e00e72"} Sep 30 10:07:26 crc kubenswrapper[4730]: I0930 10:07:26.490558 4730 scope.go:117] "RemoveContainer" containerID="b360036e53fc39b990d51c018a8ef23f2941b9c1c08a23aef095462863feafd2" Sep 30 10:07:26 crc kubenswrapper[4730]: I0930 10:07:26.490437 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-d87b9d675-rjngv" Sep 30 10:07:26 crc kubenswrapper[4730]: I0930 10:07:26.517652 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-d87b9d675-rjngv"] Sep 30 10:07:26 crc kubenswrapper[4730]: I0930 10:07:26.524940 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-d87b9d675-rjngv"] Sep 30 10:07:27 crc kubenswrapper[4730]: I0930 10:07:27.926534 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/watcher-api-0" Sep 30 10:07:27 crc kubenswrapper[4730]: I0930 10:07:27.926965 4730 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Sep 30 10:07:28 crc kubenswrapper[4730]: I0930 10:07:28.037255 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/watcher-api-0" Sep 30 10:07:28 crc kubenswrapper[4730]: I0930 10:07:28.391140 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="75687951-20a4-47c3-a5f2-397cdb1953f4" path="/var/lib/kubelet/pods/75687951-20a4-47c3-a5f2-397cdb1953f4/volumes" Sep 30 10:07:29 crc kubenswrapper[4730]: I0930 10:07:29.014886 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-gcmkp" Sep 30 10:07:29 crc kubenswrapper[4730]: I0930 10:07:29.160356 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/64e70143-f93b-4808-b388-4faaa7f8e51d-config-data\") pod \"64e70143-f93b-4808-b388-4faaa7f8e51d\" (UID: \"64e70143-f93b-4808-b388-4faaa7f8e51d\") " Sep 30 10:07:29 crc kubenswrapper[4730]: I0930 10:07:29.160630 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-89g67\" (UniqueName: \"kubernetes.io/projected/64e70143-f93b-4808-b388-4faaa7f8e51d-kube-api-access-89g67\") pod \"64e70143-f93b-4808-b388-4faaa7f8e51d\" (UID: \"64e70143-f93b-4808-b388-4faaa7f8e51d\") " Sep 30 10:07:29 crc kubenswrapper[4730]: I0930 10:07:29.160696 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/64e70143-f93b-4808-b388-4faaa7f8e51d-scripts\") pod \"64e70143-f93b-4808-b388-4faaa7f8e51d\" (UID: \"64e70143-f93b-4808-b388-4faaa7f8e51d\") " Sep 30 10:07:29 crc kubenswrapper[4730]: I0930 10:07:29.160720 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/64e70143-f93b-4808-b388-4faaa7f8e51d-logs\") pod \"64e70143-f93b-4808-b388-4faaa7f8e51d\" (UID: \"64e70143-f93b-4808-b388-4faaa7f8e51d\") " Sep 30 10:07:29 crc kubenswrapper[4730]: I0930 10:07:29.160750 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64e70143-f93b-4808-b388-4faaa7f8e51d-combined-ca-bundle\") pod \"64e70143-f93b-4808-b388-4faaa7f8e51d\" (UID: \"64e70143-f93b-4808-b388-4faaa7f8e51d\") " Sep 30 10:07:29 crc kubenswrapper[4730]: I0930 10:07:29.161666 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/64e70143-f93b-4808-b388-4faaa7f8e51d-logs" (OuterVolumeSpecName: "logs") pod "64e70143-f93b-4808-b388-4faaa7f8e51d" (UID: "64e70143-f93b-4808-b388-4faaa7f8e51d"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 10:07:29 crc kubenswrapper[4730]: I0930 10:07:29.165772 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/64e70143-f93b-4808-b388-4faaa7f8e51d-scripts" (OuterVolumeSpecName: "scripts") pod "64e70143-f93b-4808-b388-4faaa7f8e51d" (UID: "64e70143-f93b-4808-b388-4faaa7f8e51d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:07:29 crc kubenswrapper[4730]: I0930 10:07:29.165955 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/64e70143-f93b-4808-b388-4faaa7f8e51d-kube-api-access-89g67" (OuterVolumeSpecName: "kube-api-access-89g67") pod "64e70143-f93b-4808-b388-4faaa7f8e51d" (UID: "64e70143-f93b-4808-b388-4faaa7f8e51d"). InnerVolumeSpecName "kube-api-access-89g67". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:07:29 crc kubenswrapper[4730]: I0930 10:07:29.184410 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/64e70143-f93b-4808-b388-4faaa7f8e51d-config-data" (OuterVolumeSpecName: "config-data") pod "64e70143-f93b-4808-b388-4faaa7f8e51d" (UID: "64e70143-f93b-4808-b388-4faaa7f8e51d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:07:29 crc kubenswrapper[4730]: I0930 10:07:29.200068 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/64e70143-f93b-4808-b388-4faaa7f8e51d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "64e70143-f93b-4808-b388-4faaa7f8e51d" (UID: "64e70143-f93b-4808-b388-4faaa7f8e51d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:07:29 crc kubenswrapper[4730]: I0930 10:07:29.262396 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-89g67\" (UniqueName: \"kubernetes.io/projected/64e70143-f93b-4808-b388-4faaa7f8e51d-kube-api-access-89g67\") on node \"crc\" DevicePath \"\"" Sep 30 10:07:29 crc kubenswrapper[4730]: I0930 10:07:29.262439 4730 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/64e70143-f93b-4808-b388-4faaa7f8e51d-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 10:07:29 crc kubenswrapper[4730]: I0930 10:07:29.262453 4730 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/64e70143-f93b-4808-b388-4faaa7f8e51d-logs\") on node \"crc\" DevicePath \"\"" Sep 30 10:07:29 crc kubenswrapper[4730]: I0930 10:07:29.262467 4730 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64e70143-f93b-4808-b388-4faaa7f8e51d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 10:07:29 crc kubenswrapper[4730]: I0930 10:07:29.262478 4730 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/64e70143-f93b-4808-b388-4faaa7f8e51d-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 10:07:29 crc kubenswrapper[4730]: I0930 10:07:29.517124 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-gcmkp" Sep 30 10:07:29 crc kubenswrapper[4730]: I0930 10:07:29.517123 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-gcmkp" event={"ID":"64e70143-f93b-4808-b388-4faaa7f8e51d","Type":"ContainerDied","Data":"3724328affcb9ce63fbdb09f1ae3c687bb9b0cd8830693d880c43f376749d5bc"} Sep 30 10:07:29 crc kubenswrapper[4730]: I0930 10:07:29.517257 4730 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3724328affcb9ce63fbdb09f1ae3c687bb9b0cd8830693d880c43f376749d5bc" Sep 30 10:07:29 crc kubenswrapper[4730]: I0930 10:07:29.518908 4730 generic.go:334] "Generic (PLEG): container finished" podID="d1b317d0-1ec7-4bb5-9218-e853eaecbe43" containerID="98307790e39834692f9e054993bc684b2f8357a2a577bbc1719af4a10fca0927" exitCode=0 Sep 30 10:07:29 crc kubenswrapper[4730]: I0930 10:07:29.519073 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-5qwrj" event={"ID":"d1b317d0-1ec7-4bb5-9218-e853eaecbe43","Type":"ContainerDied","Data":"98307790e39834692f9e054993bc684b2f8357a2a577bbc1719af4a10fca0927"} Sep 30 10:07:29 crc kubenswrapper[4730]: I0930 10:07:29.900600 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-sync-cmvzl"] Sep 30 10:07:29 crc kubenswrapper[4730]: E0930 10:07:29.901056 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64e70143-f93b-4808-b388-4faaa7f8e51d" containerName="placement-db-sync" Sep 30 10:07:29 crc kubenswrapper[4730]: I0930 10:07:29.901082 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="64e70143-f93b-4808-b388-4faaa7f8e51d" containerName="placement-db-sync" Sep 30 10:07:29 crc kubenswrapper[4730]: E0930 10:07:29.901126 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="75687951-20a4-47c3-a5f2-397cdb1953f4" containerName="dnsmasq-dns" Sep 30 10:07:29 crc kubenswrapper[4730]: I0930 10:07:29.901136 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="75687951-20a4-47c3-a5f2-397cdb1953f4" containerName="dnsmasq-dns" Sep 30 10:07:29 crc kubenswrapper[4730]: E0930 10:07:29.901149 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4ccc082d-d035-4fa0-9a6b-76fa8b89c055" containerName="mariadb-account-create" Sep 30 10:07:29 crc kubenswrapper[4730]: I0930 10:07:29.901156 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="4ccc082d-d035-4fa0-9a6b-76fa8b89c055" containerName="mariadb-account-create" Sep 30 10:07:29 crc kubenswrapper[4730]: E0930 10:07:29.901168 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="75687951-20a4-47c3-a5f2-397cdb1953f4" containerName="init" Sep 30 10:07:29 crc kubenswrapper[4730]: I0930 10:07:29.901177 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="75687951-20a4-47c3-a5f2-397cdb1953f4" containerName="init" Sep 30 10:07:29 crc kubenswrapper[4730]: I0930 10:07:29.901390 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="64e70143-f93b-4808-b388-4faaa7f8e51d" containerName="placement-db-sync" Sep 30 10:07:29 crc kubenswrapper[4730]: I0930 10:07:29.901419 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="4ccc082d-d035-4fa0-9a6b-76fa8b89c055" containerName="mariadb-account-create" Sep 30 10:07:29 crc kubenswrapper[4730]: I0930 10:07:29.901440 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="75687951-20a4-47c3-a5f2-397cdb1953f4" containerName="dnsmasq-dns" Sep 30 10:07:29 crc kubenswrapper[4730]: I0930 
10:07:29.902242 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-cmvzl" Sep 30 10:07:29 crc kubenswrapper[4730]: I0930 10:07:29.906297 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Sep 30 10:07:29 crc kubenswrapper[4730]: I0930 10:07:29.906360 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Sep 30 10:07:29 crc kubenswrapper[4730]: I0930 10:07:29.910289 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-97m5r" Sep 30 10:07:29 crc kubenswrapper[4730]: I0930 10:07:29.927254 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-cmvzl"] Sep 30 10:07:30 crc kubenswrapper[4730]: I0930 10:07:30.074566 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/577c636c-9e1c-4e65-b164-dcc8e200d7c1-combined-ca-bundle\") pod \"neutron-db-sync-cmvzl\" (UID: \"577c636c-9e1c-4e65-b164-dcc8e200d7c1\") " pod="openstack/neutron-db-sync-cmvzl" Sep 30 10:07:30 crc kubenswrapper[4730]: I0930 10:07:30.074648 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/577c636c-9e1c-4e65-b164-dcc8e200d7c1-config\") pod \"neutron-db-sync-cmvzl\" (UID: \"577c636c-9e1c-4e65-b164-dcc8e200d7c1\") " pod="openstack/neutron-db-sync-cmvzl" Sep 30 10:07:30 crc kubenswrapper[4730]: I0930 10:07:30.074774 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qfc74\" (UniqueName: \"kubernetes.io/projected/577c636c-9e1c-4e65-b164-dcc8e200d7c1-kube-api-access-qfc74\") pod \"neutron-db-sync-cmvzl\" (UID: \"577c636c-9e1c-4e65-b164-dcc8e200d7c1\") " pod="openstack/neutron-db-sync-cmvzl" Sep 30 10:07:30 crc kubenswrapper[4730]: I0930 10:07:30.122133 4730 scope.go:117] "RemoveContainer" containerID="f74c7acaa016be8cba95d1920acc16be85555d837c579c3435db1395bdad011d" Sep 30 10:07:30 crc kubenswrapper[4730]: I0930 10:07:30.177511 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qfc74\" (UniqueName: \"kubernetes.io/projected/577c636c-9e1c-4e65-b164-dcc8e200d7c1-kube-api-access-qfc74\") pod \"neutron-db-sync-cmvzl\" (UID: \"577c636c-9e1c-4e65-b164-dcc8e200d7c1\") " pod="openstack/neutron-db-sync-cmvzl" Sep 30 10:07:30 crc kubenswrapper[4730]: I0930 10:07:30.179355 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/577c636c-9e1c-4e65-b164-dcc8e200d7c1-combined-ca-bundle\") pod \"neutron-db-sync-cmvzl\" (UID: \"577c636c-9e1c-4e65-b164-dcc8e200d7c1\") " pod="openstack/neutron-db-sync-cmvzl" Sep 30 10:07:30 crc kubenswrapper[4730]: I0930 10:07:30.184115 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/577c636c-9e1c-4e65-b164-dcc8e200d7c1-config\") pod \"neutron-db-sync-cmvzl\" (UID: \"577c636c-9e1c-4e65-b164-dcc8e200d7c1\") " pod="openstack/neutron-db-sync-cmvzl" Sep 30 10:07:30 crc kubenswrapper[4730]: I0930 10:07:30.189837 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/577c636c-9e1c-4e65-b164-dcc8e200d7c1-combined-ca-bundle\") pod 
\"neutron-db-sync-cmvzl\" (UID: \"577c636c-9e1c-4e65-b164-dcc8e200d7c1\") " pod="openstack/neutron-db-sync-cmvzl" Sep 30 10:07:30 crc kubenswrapper[4730]: I0930 10:07:30.190626 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/577c636c-9e1c-4e65-b164-dcc8e200d7c1-config\") pod \"neutron-db-sync-cmvzl\" (UID: \"577c636c-9e1c-4e65-b164-dcc8e200d7c1\") " pod="openstack/neutron-db-sync-cmvzl" Sep 30 10:07:30 crc kubenswrapper[4730]: I0930 10:07:30.194248 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qfc74\" (UniqueName: \"kubernetes.io/projected/577c636c-9e1c-4e65-b164-dcc8e200d7c1-kube-api-access-qfc74\") pod \"neutron-db-sync-cmvzl\" (UID: \"577c636c-9e1c-4e65-b164-dcc8e200d7c1\") " pod="openstack/neutron-db-sync-cmvzl" Sep 30 10:07:30 crc kubenswrapper[4730]: I0930 10:07:30.217926 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-7d48d7c7fd-7l8hx"] Sep 30 10:07:30 crc kubenswrapper[4730]: I0930 10:07:30.222132 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-7d48d7c7fd-7l8hx" Sep 30 10:07:30 crc kubenswrapper[4730]: I0930 10:07:30.224102 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Sep 30 10:07:30 crc kubenswrapper[4730]: I0930 10:07:30.228058 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-4ftjh" Sep 30 10:07:30 crc kubenswrapper[4730]: I0930 10:07:30.228332 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Sep 30 10:07:30 crc kubenswrapper[4730]: I0930 10:07:30.228579 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-public-svc" Sep 30 10:07:30 crc kubenswrapper[4730]: I0930 10:07:30.229506 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-internal-svc" Sep 30 10:07:30 crc kubenswrapper[4730]: I0930 10:07:30.229963 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-cmvzl" Sep 30 10:07:30 crc kubenswrapper[4730]: I0930 10:07:30.231922 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-7d48d7c7fd-7l8hx"] Sep 30 10:07:30 crc kubenswrapper[4730]: I0930 10:07:30.393494 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/57cdca52-f1f9-48c7-8fb6-9144a033c957-scripts\") pod \"placement-7d48d7c7fd-7l8hx\" (UID: \"57cdca52-f1f9-48c7-8fb6-9144a033c957\") " pod="openstack/placement-7d48d7c7fd-7l8hx" Sep 30 10:07:30 crc kubenswrapper[4730]: I0930 10:07:30.393544 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/57cdca52-f1f9-48c7-8fb6-9144a033c957-combined-ca-bundle\") pod \"placement-7d48d7c7fd-7l8hx\" (UID: \"57cdca52-f1f9-48c7-8fb6-9144a033c957\") " pod="openstack/placement-7d48d7c7fd-7l8hx" Sep 30 10:07:30 crc kubenswrapper[4730]: I0930 10:07:30.393569 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w8qqg\" (UniqueName: \"kubernetes.io/projected/57cdca52-f1f9-48c7-8fb6-9144a033c957-kube-api-access-w8qqg\") pod \"placement-7d48d7c7fd-7l8hx\" (UID: \"57cdca52-f1f9-48c7-8fb6-9144a033c957\") " pod="openstack/placement-7d48d7c7fd-7l8hx" Sep 30 10:07:30 crc kubenswrapper[4730]: I0930 10:07:30.393597 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/57cdca52-f1f9-48c7-8fb6-9144a033c957-public-tls-certs\") pod \"placement-7d48d7c7fd-7l8hx\" (UID: \"57cdca52-f1f9-48c7-8fb6-9144a033c957\") " pod="openstack/placement-7d48d7c7fd-7l8hx" Sep 30 10:07:30 crc kubenswrapper[4730]: I0930 10:07:30.393686 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/57cdca52-f1f9-48c7-8fb6-9144a033c957-logs\") pod \"placement-7d48d7c7fd-7l8hx\" (UID: \"57cdca52-f1f9-48c7-8fb6-9144a033c957\") " pod="openstack/placement-7d48d7c7fd-7l8hx" Sep 30 10:07:30 crc kubenswrapper[4730]: I0930 10:07:30.393750 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/57cdca52-f1f9-48c7-8fb6-9144a033c957-internal-tls-certs\") pod \"placement-7d48d7c7fd-7l8hx\" (UID: \"57cdca52-f1f9-48c7-8fb6-9144a033c957\") " pod="openstack/placement-7d48d7c7fd-7l8hx" Sep 30 10:07:30 crc kubenswrapper[4730]: I0930 10:07:30.393782 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/57cdca52-f1f9-48c7-8fb6-9144a033c957-config-data\") pod \"placement-7d48d7c7fd-7l8hx\" (UID: \"57cdca52-f1f9-48c7-8fb6-9144a033c957\") " pod="openstack/placement-7d48d7c7fd-7l8hx" Sep 30 10:07:30 crc kubenswrapper[4730]: I0930 10:07:30.495791 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/57cdca52-f1f9-48c7-8fb6-9144a033c957-scripts\") pod \"placement-7d48d7c7fd-7l8hx\" (UID: \"57cdca52-f1f9-48c7-8fb6-9144a033c957\") " pod="openstack/placement-7d48d7c7fd-7l8hx" Sep 30 10:07:30 crc kubenswrapper[4730]: I0930 10:07:30.495844 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/57cdca52-f1f9-48c7-8fb6-9144a033c957-combined-ca-bundle\") pod \"placement-7d48d7c7fd-7l8hx\" (UID: \"57cdca52-f1f9-48c7-8fb6-9144a033c957\") " pod="openstack/placement-7d48d7c7fd-7l8hx" Sep 30 10:07:30 crc kubenswrapper[4730]: I0930 10:07:30.495867 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w8qqg\" (UniqueName: \"kubernetes.io/projected/57cdca52-f1f9-48c7-8fb6-9144a033c957-kube-api-access-w8qqg\") pod \"placement-7d48d7c7fd-7l8hx\" (UID: \"57cdca52-f1f9-48c7-8fb6-9144a033c957\") " pod="openstack/placement-7d48d7c7fd-7l8hx" Sep 30 10:07:30 crc kubenswrapper[4730]: I0930 10:07:30.495893 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/57cdca52-f1f9-48c7-8fb6-9144a033c957-public-tls-certs\") pod \"placement-7d48d7c7fd-7l8hx\" (UID: \"57cdca52-f1f9-48c7-8fb6-9144a033c957\") " pod="openstack/placement-7d48d7c7fd-7l8hx" Sep 30 10:07:30 crc kubenswrapper[4730]: I0930 10:07:30.495989 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/57cdca52-f1f9-48c7-8fb6-9144a033c957-logs\") pod \"placement-7d48d7c7fd-7l8hx\" (UID: \"57cdca52-f1f9-48c7-8fb6-9144a033c957\") " pod="openstack/placement-7d48d7c7fd-7l8hx" Sep 30 10:07:30 crc kubenswrapper[4730]: I0930 10:07:30.496076 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/57cdca52-f1f9-48c7-8fb6-9144a033c957-internal-tls-certs\") pod \"placement-7d48d7c7fd-7l8hx\" (UID: \"57cdca52-f1f9-48c7-8fb6-9144a033c957\") " pod="openstack/placement-7d48d7c7fd-7l8hx" Sep 30 10:07:30 crc kubenswrapper[4730]: I0930 10:07:30.496116 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/57cdca52-f1f9-48c7-8fb6-9144a033c957-config-data\") pod \"placement-7d48d7c7fd-7l8hx\" (UID: \"57cdca52-f1f9-48c7-8fb6-9144a033c957\") " pod="openstack/placement-7d48d7c7fd-7l8hx" Sep 30 10:07:30 crc kubenswrapper[4730]: I0930 10:07:30.496869 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/57cdca52-f1f9-48c7-8fb6-9144a033c957-logs\") pod \"placement-7d48d7c7fd-7l8hx\" (UID: \"57cdca52-f1f9-48c7-8fb6-9144a033c957\") " pod="openstack/placement-7d48d7c7fd-7l8hx" Sep 30 10:07:30 crc kubenswrapper[4730]: I0930 10:07:30.499989 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/57cdca52-f1f9-48c7-8fb6-9144a033c957-scripts\") pod \"placement-7d48d7c7fd-7l8hx\" (UID: \"57cdca52-f1f9-48c7-8fb6-9144a033c957\") " pod="openstack/placement-7d48d7c7fd-7l8hx" Sep 30 10:07:30 crc kubenswrapper[4730]: I0930 10:07:30.500183 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/57cdca52-f1f9-48c7-8fb6-9144a033c957-public-tls-certs\") pod \"placement-7d48d7c7fd-7l8hx\" (UID: \"57cdca52-f1f9-48c7-8fb6-9144a033c957\") " pod="openstack/placement-7d48d7c7fd-7l8hx" Sep 30 10:07:30 crc kubenswrapper[4730]: I0930 10:07:30.500252 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/57cdca52-f1f9-48c7-8fb6-9144a033c957-config-data\") pod \"placement-7d48d7c7fd-7l8hx\" (UID: 
\"57cdca52-f1f9-48c7-8fb6-9144a033c957\") " pod="openstack/placement-7d48d7c7fd-7l8hx" Sep 30 10:07:30 crc kubenswrapper[4730]: I0930 10:07:30.503119 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/57cdca52-f1f9-48c7-8fb6-9144a033c957-internal-tls-certs\") pod \"placement-7d48d7c7fd-7l8hx\" (UID: \"57cdca52-f1f9-48c7-8fb6-9144a033c957\") " pod="openstack/placement-7d48d7c7fd-7l8hx" Sep 30 10:07:30 crc kubenswrapper[4730]: I0930 10:07:30.515593 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/57cdca52-f1f9-48c7-8fb6-9144a033c957-combined-ca-bundle\") pod \"placement-7d48d7c7fd-7l8hx\" (UID: \"57cdca52-f1f9-48c7-8fb6-9144a033c957\") " pod="openstack/placement-7d48d7c7fd-7l8hx" Sep 30 10:07:30 crc kubenswrapper[4730]: I0930 10:07:30.522444 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w8qqg\" (UniqueName: \"kubernetes.io/projected/57cdca52-f1f9-48c7-8fb6-9144a033c957-kube-api-access-w8qqg\") pod \"placement-7d48d7c7fd-7l8hx\" (UID: \"57cdca52-f1f9-48c7-8fb6-9144a033c957\") " pod="openstack/placement-7d48d7c7fd-7l8hx" Sep 30 10:07:30 crc kubenswrapper[4730]: I0930 10:07:30.562172 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-7d48d7c7fd-7l8hx" Sep 30 10:07:32 crc kubenswrapper[4730]: I0930 10:07:32.195000 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-5qwrj" Sep 30 10:07:32 crc kubenswrapper[4730]: I0930 10:07:32.331422 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d1b317d0-1ec7-4bb5-9218-e853eaecbe43-fernet-keys\") pod \"d1b317d0-1ec7-4bb5-9218-e853eaecbe43\" (UID: \"d1b317d0-1ec7-4bb5-9218-e853eaecbe43\") " Sep 30 10:07:32 crc kubenswrapper[4730]: I0930 10:07:32.331491 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d1b317d0-1ec7-4bb5-9218-e853eaecbe43-combined-ca-bundle\") pod \"d1b317d0-1ec7-4bb5-9218-e853eaecbe43\" (UID: \"d1b317d0-1ec7-4bb5-9218-e853eaecbe43\") " Sep 30 10:07:32 crc kubenswrapper[4730]: I0930 10:07:32.331518 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d1b317d0-1ec7-4bb5-9218-e853eaecbe43-scripts\") pod \"d1b317d0-1ec7-4bb5-9218-e853eaecbe43\" (UID: \"d1b317d0-1ec7-4bb5-9218-e853eaecbe43\") " Sep 30 10:07:32 crc kubenswrapper[4730]: I0930 10:07:32.331635 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d1b317d0-1ec7-4bb5-9218-e853eaecbe43-config-data\") pod \"d1b317d0-1ec7-4bb5-9218-e853eaecbe43\" (UID: \"d1b317d0-1ec7-4bb5-9218-e853eaecbe43\") " Sep 30 10:07:32 crc kubenswrapper[4730]: I0930 10:07:32.331720 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/d1b317d0-1ec7-4bb5-9218-e853eaecbe43-credential-keys\") pod \"d1b317d0-1ec7-4bb5-9218-e853eaecbe43\" (UID: \"d1b317d0-1ec7-4bb5-9218-e853eaecbe43\") " Sep 30 10:07:32 crc kubenswrapper[4730]: I0930 10:07:32.331803 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4vw6j\" (UniqueName: 
\"kubernetes.io/projected/d1b317d0-1ec7-4bb5-9218-e853eaecbe43-kube-api-access-4vw6j\") pod \"d1b317d0-1ec7-4bb5-9218-e853eaecbe43\" (UID: \"d1b317d0-1ec7-4bb5-9218-e853eaecbe43\") " Sep 30 10:07:32 crc kubenswrapper[4730]: I0930 10:07:32.337219 4730 patch_prober.go:28] interesting pod/machine-config-daemon-d4zf9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 10:07:32 crc kubenswrapper[4730]: I0930 10:07:32.337279 4730 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 10:07:32 crc kubenswrapper[4730]: I0930 10:07:32.341076 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d1b317d0-1ec7-4bb5-9218-e853eaecbe43-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "d1b317d0-1ec7-4bb5-9218-e853eaecbe43" (UID: "d1b317d0-1ec7-4bb5-9218-e853eaecbe43"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:07:32 crc kubenswrapper[4730]: I0930 10:07:32.342324 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d1b317d0-1ec7-4bb5-9218-e853eaecbe43-scripts" (OuterVolumeSpecName: "scripts") pod "d1b317d0-1ec7-4bb5-9218-e853eaecbe43" (UID: "d1b317d0-1ec7-4bb5-9218-e853eaecbe43"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:07:32 crc kubenswrapper[4730]: I0930 10:07:32.344836 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d1b317d0-1ec7-4bb5-9218-e853eaecbe43-kube-api-access-4vw6j" (OuterVolumeSpecName: "kube-api-access-4vw6j") pod "d1b317d0-1ec7-4bb5-9218-e853eaecbe43" (UID: "d1b317d0-1ec7-4bb5-9218-e853eaecbe43"). InnerVolumeSpecName "kube-api-access-4vw6j". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:07:32 crc kubenswrapper[4730]: I0930 10:07:32.345539 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d1b317d0-1ec7-4bb5-9218-e853eaecbe43-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "d1b317d0-1ec7-4bb5-9218-e853eaecbe43" (UID: "d1b317d0-1ec7-4bb5-9218-e853eaecbe43"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:07:32 crc kubenswrapper[4730]: I0930 10:07:32.371650 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d1b317d0-1ec7-4bb5-9218-e853eaecbe43-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d1b317d0-1ec7-4bb5-9218-e853eaecbe43" (UID: "d1b317d0-1ec7-4bb5-9218-e853eaecbe43"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:07:32 crc kubenswrapper[4730]: I0930 10:07:32.385920 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d1b317d0-1ec7-4bb5-9218-e853eaecbe43-config-data" (OuterVolumeSpecName: "config-data") pod "d1b317d0-1ec7-4bb5-9218-e853eaecbe43" (UID: "d1b317d0-1ec7-4bb5-9218-e853eaecbe43"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:07:32 crc kubenswrapper[4730]: I0930 10:07:32.427384 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-wrvww"] Sep 30 10:07:32 crc kubenswrapper[4730]: W0930 10:07:32.427800 4730 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod488816e9_d4e6_4956_9671_c9de4118821c.slice/crio-627b4e4ea5280d050face9e330a8d3540bdc9d8c022f887f4d930951d6380517 WatchSource:0}: Error finding container 627b4e4ea5280d050face9e330a8d3540bdc9d8c022f887f4d930951d6380517: Status 404 returned error can't find the container with id 627b4e4ea5280d050face9e330a8d3540bdc9d8c022f887f4d930951d6380517 Sep 30 10:07:32 crc kubenswrapper[4730]: I0930 10:07:32.434131 4730 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d1b317d0-1ec7-4bb5-9218-e853eaecbe43-fernet-keys\") on node \"crc\" DevicePath \"\"" Sep 30 10:07:32 crc kubenswrapper[4730]: I0930 10:07:32.434166 4730 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d1b317d0-1ec7-4bb5-9218-e853eaecbe43-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 10:07:32 crc kubenswrapper[4730]: I0930 10:07:32.434179 4730 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d1b317d0-1ec7-4bb5-9218-e853eaecbe43-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 10:07:32 crc kubenswrapper[4730]: I0930 10:07:32.434190 4730 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d1b317d0-1ec7-4bb5-9218-e853eaecbe43-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 10:07:32 crc kubenswrapper[4730]: I0930 10:07:32.434210 4730 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/d1b317d0-1ec7-4bb5-9218-e853eaecbe43-credential-keys\") on node \"crc\" DevicePath \"\"" Sep 30 10:07:32 crc kubenswrapper[4730]: I0930 10:07:32.434223 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4vw6j\" (UniqueName: \"kubernetes.io/projected/d1b317d0-1ec7-4bb5-9218-e853eaecbe43-kube-api-access-4vw6j\") on node \"crc\" DevicePath \"\"" Sep 30 10:07:32 crc kubenswrapper[4730]: I0930 10:07:32.574078 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-wrvww" event={"ID":"488816e9-d4e6-4956-9671-c9de4118821c","Type":"ContainerStarted","Data":"627b4e4ea5280d050face9e330a8d3540bdc9d8c022f887f4d930951d6380517"} Sep 30 10:07:32 crc kubenswrapper[4730]: I0930 10:07:32.579954 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-applier-0" event={"ID":"db8ecf95-5b86-4775-85ee-a3da046e9dba","Type":"ContainerStarted","Data":"83be5e15a852f4b1b312956951e60b6c6e7f455eda2f7876a95b01c7ace679d8"} Sep 30 10:07:32 crc kubenswrapper[4730]: I0930 10:07:32.582378 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1acf6558-7e16-4d00-b19f-c757db81dc58","Type":"ContainerStarted","Data":"35170d1b138d98c0ee51c133179a1e6f61e35a90307e81f46017a14a894c7e5d"} Sep 30 10:07:32 crc kubenswrapper[4730]: I0930 10:07:32.589725 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-5qwrj" 
event={"ID":"d1b317d0-1ec7-4bb5-9218-e853eaecbe43","Type":"ContainerDied","Data":"3f5add9369f1e293ecc38048d21d6232f6a1a68b2f724613ccadf41bfe58c32f"} Sep 30 10:07:32 crc kubenswrapper[4730]: I0930 10:07:32.589764 4730 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3f5add9369f1e293ecc38048d21d6232f6a1a68b2f724613ccadf41bfe58c32f" Sep 30 10:07:32 crc kubenswrapper[4730]: I0930 10:07:32.589828 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-5qwrj" Sep 30 10:07:32 crc kubenswrapper[4730]: I0930 10:07:32.593884 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-fj5rw" event={"ID":"7fc71401-9b3c-455c-9a56-28c6fcbde898","Type":"ContainerStarted","Data":"94569ff68ed078a7a7205f05e47b9f78a7ae562146af2648048462abbdf3cb11"} Sep 30 10:07:32 crc kubenswrapper[4730]: I0930 10:07:32.596173 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-cmvzl"] Sep 30 10:07:32 crc kubenswrapper[4730]: I0930 10:07:32.604869 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-decision-engine-0" event={"ID":"a4f9bd21-5f86-4443-87be-eadb5d1c77f9","Type":"ContainerStarted","Data":"f221a9aa65cec1a8d6cd07c7fd83d5f118dd091b859e3d97e66657018b1a7af5"} Sep 30 10:07:32 crc kubenswrapper[4730]: W0930 10:07:32.607519 4730 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod57cdca52_f1f9_48c7_8fb6_9144a033c957.slice/crio-de892ccaf32f038c122369573520be34a79d40b2465d968f72cf8de11ebbe1ad WatchSource:0}: Error finding container de892ccaf32f038c122369573520be34a79d40b2465d968f72cf8de11ebbe1ad: Status 404 returned error can't find the container with id de892ccaf32f038c122369573520be34a79d40b2465d968f72cf8de11ebbe1ad Sep 30 10:07:32 crc kubenswrapper[4730]: I0930 10:07:32.609435 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-7d48d7c7fd-7l8hx"] Sep 30 10:07:32 crc kubenswrapper[4730]: I0930 10:07:32.616256 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/watcher-applier-0" podStartSLOduration=2.6673724439999997 podStartE2EDuration="10.616238846s" podCreationTimestamp="2025-09-30 10:07:22 +0000 UTC" firstStartedPulling="2025-09-30 10:07:24.077568501 +0000 UTC m=+1088.410828494" lastFinishedPulling="2025-09-30 10:07:32.026434913 +0000 UTC m=+1096.359694896" observedRunningTime="2025-09-30 10:07:32.60501471 +0000 UTC m=+1096.938274723" watchObservedRunningTime="2025-09-30 10:07:32.616238846 +0000 UTC m=+1096.949498849" Sep 30 10:07:32 crc kubenswrapper[4730]: I0930 10:07:32.626084 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-sync-fj5rw" podStartSLOduration=3.8972922309999998 podStartE2EDuration="13.626066175s" podCreationTimestamp="2025-09-30 10:07:19 +0000 UTC" firstStartedPulling="2025-09-30 10:07:22.255291601 +0000 UTC m=+1086.588551594" lastFinishedPulling="2025-09-30 10:07:31.984065545 +0000 UTC m=+1096.317325538" observedRunningTime="2025-09-30 10:07:32.61958125 +0000 UTC m=+1096.952841243" watchObservedRunningTime="2025-09-30 10:07:32.626066175 +0000 UTC m=+1096.959326178" Sep 30 10:07:32 crc kubenswrapper[4730]: I0930 10:07:32.643460 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/watcher-decision-engine-0" podStartSLOduration=2.708377085 podStartE2EDuration="10.643437007s" 
podCreationTimestamp="2025-09-30 10:07:22 +0000 UTC" firstStartedPulling="2025-09-30 10:07:24.0468925 +0000 UTC m=+1088.380152493" lastFinishedPulling="2025-09-30 10:07:31.981952422 +0000 UTC m=+1096.315212415" observedRunningTime="2025-09-30 10:07:32.634070018 +0000 UTC m=+1096.967330001" watchObservedRunningTime="2025-09-30 10:07:32.643437007 +0000 UTC m=+1096.976697020" Sep 30 10:07:32 crc kubenswrapper[4730]: I0930 10:07:32.926813 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/watcher-api-0" Sep 30 10:07:32 crc kubenswrapper[4730]: I0930 10:07:32.939209 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/watcher-api-0" Sep 30 10:07:32 crc kubenswrapper[4730]: I0930 10:07:32.993172 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/watcher-decision-engine-0" Sep 30 10:07:33 crc kubenswrapper[4730]: I0930 10:07:33.013359 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/watcher-applier-0" Sep 30 10:07:33 crc kubenswrapper[4730]: I0930 10:07:33.013406 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/watcher-applier-0" Sep 30 10:07:33 crc kubenswrapper[4730]: I0930 10:07:33.070731 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/watcher-decision-engine-0" Sep 30 10:07:33 crc kubenswrapper[4730]: I0930 10:07:33.090723 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/watcher-applier-0" Sep 30 10:07:33 crc kubenswrapper[4730]: I0930 10:07:33.374879 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-7dfc9d946-psk9c"] Sep 30 10:07:33 crc kubenswrapper[4730]: E0930 10:07:33.375341 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d1b317d0-1ec7-4bb5-9218-e853eaecbe43" containerName="keystone-bootstrap" Sep 30 10:07:33 crc kubenswrapper[4730]: I0930 10:07:33.375362 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="d1b317d0-1ec7-4bb5-9218-e853eaecbe43" containerName="keystone-bootstrap" Sep 30 10:07:33 crc kubenswrapper[4730]: I0930 10:07:33.375595 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="d1b317d0-1ec7-4bb5-9218-e853eaecbe43" containerName="keystone-bootstrap" Sep 30 10:07:33 crc kubenswrapper[4730]: I0930 10:07:33.376406 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-7dfc9d946-psk9c" Sep 30 10:07:33 crc kubenswrapper[4730]: I0930 10:07:33.380191 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-internal-svc" Sep 30 10:07:33 crc kubenswrapper[4730]: I0930 10:07:33.381093 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Sep 30 10:07:33 crc kubenswrapper[4730]: I0930 10:07:33.381138 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Sep 30 10:07:33 crc kubenswrapper[4730]: I0930 10:07:33.381283 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Sep 30 10:07:33 crc kubenswrapper[4730]: I0930 10:07:33.381487 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-wg7cm" Sep 30 10:07:33 crc kubenswrapper[4730]: I0930 10:07:33.381686 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-public-svc" Sep 30 10:07:33 crc kubenswrapper[4730]: I0930 10:07:33.386832 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-7dfc9d946-psk9c"] Sep 30 10:07:33 crc kubenswrapper[4730]: I0930 10:07:33.457267 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/fdc671fd-529e-484c-9924-355c64d393ff-credential-keys\") pod \"keystone-7dfc9d946-psk9c\" (UID: \"fdc671fd-529e-484c-9924-355c64d393ff\") " pod="openstack/keystone-7dfc9d946-psk9c" Sep 30 10:07:33 crc kubenswrapper[4730]: I0930 10:07:33.457332 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fdc671fd-529e-484c-9924-355c64d393ff-scripts\") pod \"keystone-7dfc9d946-psk9c\" (UID: \"fdc671fd-529e-484c-9924-355c64d393ff\") " pod="openstack/keystone-7dfc9d946-psk9c" Sep 30 10:07:33 crc kubenswrapper[4730]: I0930 10:07:33.457396 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fdc671fd-529e-484c-9924-355c64d393ff-combined-ca-bundle\") pod \"keystone-7dfc9d946-psk9c\" (UID: \"fdc671fd-529e-484c-9924-355c64d393ff\") " pod="openstack/keystone-7dfc9d946-psk9c" Sep 30 10:07:33 crc kubenswrapper[4730]: I0930 10:07:33.457477 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fdc671fd-529e-484c-9924-355c64d393ff-config-data\") pod \"keystone-7dfc9d946-psk9c\" (UID: \"fdc671fd-529e-484c-9924-355c64d393ff\") " pod="openstack/keystone-7dfc9d946-psk9c" Sep 30 10:07:33 crc kubenswrapper[4730]: I0930 10:07:33.457504 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-52j5s\" (UniqueName: \"kubernetes.io/projected/fdc671fd-529e-484c-9924-355c64d393ff-kube-api-access-52j5s\") pod \"keystone-7dfc9d946-psk9c\" (UID: \"fdc671fd-529e-484c-9924-355c64d393ff\") " pod="openstack/keystone-7dfc9d946-psk9c" Sep 30 10:07:33 crc kubenswrapper[4730]: I0930 10:07:33.457587 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/fdc671fd-529e-484c-9924-355c64d393ff-internal-tls-certs\") pod \"keystone-7dfc9d946-psk9c\" (UID: 
\"fdc671fd-529e-484c-9924-355c64d393ff\") " pod="openstack/keystone-7dfc9d946-psk9c" Sep 30 10:07:33 crc kubenswrapper[4730]: I0930 10:07:33.457658 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/fdc671fd-529e-484c-9924-355c64d393ff-fernet-keys\") pod \"keystone-7dfc9d946-psk9c\" (UID: \"fdc671fd-529e-484c-9924-355c64d393ff\") " pod="openstack/keystone-7dfc9d946-psk9c" Sep 30 10:07:33 crc kubenswrapper[4730]: I0930 10:07:33.457691 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/fdc671fd-529e-484c-9924-355c64d393ff-public-tls-certs\") pod \"keystone-7dfc9d946-psk9c\" (UID: \"fdc671fd-529e-484c-9924-355c64d393ff\") " pod="openstack/keystone-7dfc9d946-psk9c" Sep 30 10:07:33 crc kubenswrapper[4730]: I0930 10:07:33.558822 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/fdc671fd-529e-484c-9924-355c64d393ff-internal-tls-certs\") pod \"keystone-7dfc9d946-psk9c\" (UID: \"fdc671fd-529e-484c-9924-355c64d393ff\") " pod="openstack/keystone-7dfc9d946-psk9c" Sep 30 10:07:33 crc kubenswrapper[4730]: I0930 10:07:33.558925 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/fdc671fd-529e-484c-9924-355c64d393ff-fernet-keys\") pod \"keystone-7dfc9d946-psk9c\" (UID: \"fdc671fd-529e-484c-9924-355c64d393ff\") " pod="openstack/keystone-7dfc9d946-psk9c" Sep 30 10:07:33 crc kubenswrapper[4730]: I0930 10:07:33.558958 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/fdc671fd-529e-484c-9924-355c64d393ff-public-tls-certs\") pod \"keystone-7dfc9d946-psk9c\" (UID: \"fdc671fd-529e-484c-9924-355c64d393ff\") " pod="openstack/keystone-7dfc9d946-psk9c" Sep 30 10:07:33 crc kubenswrapper[4730]: I0930 10:07:33.559863 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/fdc671fd-529e-484c-9924-355c64d393ff-credential-keys\") pod \"keystone-7dfc9d946-psk9c\" (UID: \"fdc671fd-529e-484c-9924-355c64d393ff\") " pod="openstack/keystone-7dfc9d946-psk9c" Sep 30 10:07:33 crc kubenswrapper[4730]: I0930 10:07:33.559903 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fdc671fd-529e-484c-9924-355c64d393ff-scripts\") pod \"keystone-7dfc9d946-psk9c\" (UID: \"fdc671fd-529e-484c-9924-355c64d393ff\") " pod="openstack/keystone-7dfc9d946-psk9c" Sep 30 10:07:33 crc kubenswrapper[4730]: I0930 10:07:33.559960 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fdc671fd-529e-484c-9924-355c64d393ff-combined-ca-bundle\") pod \"keystone-7dfc9d946-psk9c\" (UID: \"fdc671fd-529e-484c-9924-355c64d393ff\") " pod="openstack/keystone-7dfc9d946-psk9c" Sep 30 10:07:33 crc kubenswrapper[4730]: I0930 10:07:33.560036 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fdc671fd-529e-484c-9924-355c64d393ff-config-data\") pod \"keystone-7dfc9d946-psk9c\" (UID: \"fdc671fd-529e-484c-9924-355c64d393ff\") " pod="openstack/keystone-7dfc9d946-psk9c" Sep 30 10:07:33 crc 
kubenswrapper[4730]: I0930 10:07:33.560063 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-52j5s\" (UniqueName: \"kubernetes.io/projected/fdc671fd-529e-484c-9924-355c64d393ff-kube-api-access-52j5s\") pod \"keystone-7dfc9d946-psk9c\" (UID: \"fdc671fd-529e-484c-9924-355c64d393ff\") " pod="openstack/keystone-7dfc9d946-psk9c" Sep 30 10:07:33 crc kubenswrapper[4730]: I0930 10:07:33.563333 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/fdc671fd-529e-484c-9924-355c64d393ff-public-tls-certs\") pod \"keystone-7dfc9d946-psk9c\" (UID: \"fdc671fd-529e-484c-9924-355c64d393ff\") " pod="openstack/keystone-7dfc9d946-psk9c" Sep 30 10:07:33 crc kubenswrapper[4730]: I0930 10:07:33.564832 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/fdc671fd-529e-484c-9924-355c64d393ff-fernet-keys\") pod \"keystone-7dfc9d946-psk9c\" (UID: \"fdc671fd-529e-484c-9924-355c64d393ff\") " pod="openstack/keystone-7dfc9d946-psk9c" Sep 30 10:07:33 crc kubenswrapper[4730]: I0930 10:07:33.565388 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/fdc671fd-529e-484c-9924-355c64d393ff-internal-tls-certs\") pod \"keystone-7dfc9d946-psk9c\" (UID: \"fdc671fd-529e-484c-9924-355c64d393ff\") " pod="openstack/keystone-7dfc9d946-psk9c" Sep 30 10:07:33 crc kubenswrapper[4730]: I0930 10:07:33.565675 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/fdc671fd-529e-484c-9924-355c64d393ff-credential-keys\") pod \"keystone-7dfc9d946-psk9c\" (UID: \"fdc671fd-529e-484c-9924-355c64d393ff\") " pod="openstack/keystone-7dfc9d946-psk9c" Sep 30 10:07:33 crc kubenswrapper[4730]: I0930 10:07:33.569314 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fdc671fd-529e-484c-9924-355c64d393ff-config-data\") pod \"keystone-7dfc9d946-psk9c\" (UID: \"fdc671fd-529e-484c-9924-355c64d393ff\") " pod="openstack/keystone-7dfc9d946-psk9c" Sep 30 10:07:33 crc kubenswrapper[4730]: I0930 10:07:33.571897 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fdc671fd-529e-484c-9924-355c64d393ff-combined-ca-bundle\") pod \"keystone-7dfc9d946-psk9c\" (UID: \"fdc671fd-529e-484c-9924-355c64d393ff\") " pod="openstack/keystone-7dfc9d946-psk9c" Sep 30 10:07:33 crc kubenswrapper[4730]: I0930 10:07:33.577306 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fdc671fd-529e-484c-9924-355c64d393ff-scripts\") pod \"keystone-7dfc9d946-psk9c\" (UID: \"fdc671fd-529e-484c-9924-355c64d393ff\") " pod="openstack/keystone-7dfc9d946-psk9c" Sep 30 10:07:33 crc kubenswrapper[4730]: I0930 10:07:33.589303 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-52j5s\" (UniqueName: \"kubernetes.io/projected/fdc671fd-529e-484c-9924-355c64d393ff-kube-api-access-52j5s\") pod \"keystone-7dfc9d946-psk9c\" (UID: \"fdc671fd-529e-484c-9924-355c64d393ff\") " pod="openstack/keystone-7dfc9d946-psk9c" Sep 30 10:07:33 crc kubenswrapper[4730]: I0930 10:07:33.692967 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-cmvzl" 
event={"ID":"577c636c-9e1c-4e65-b164-dcc8e200d7c1","Type":"ContainerStarted","Data":"db9ce9066063546a453449bd45807592f03643733df7fdeb5f1f204ff80fd0fc"} Sep 30 10:07:33 crc kubenswrapper[4730]: I0930 10:07:33.693053 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-cmvzl" event={"ID":"577c636c-9e1c-4e65-b164-dcc8e200d7c1","Type":"ContainerStarted","Data":"382a5f83342243e3b596f071c1b2f8a9c5fa04f7b6f9520da54458fcd6ce5661"} Sep 30 10:07:33 crc kubenswrapper[4730]: I0930 10:07:33.697913 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-7d48d7c7fd-7l8hx" event={"ID":"57cdca52-f1f9-48c7-8fb6-9144a033c957","Type":"ContainerStarted","Data":"bb8d367c39126335d8325c45c5a82e1afb847f0c0f45765a9bf96791f0787e8d"} Sep 30 10:07:33 crc kubenswrapper[4730]: I0930 10:07:33.697981 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-7d48d7c7fd-7l8hx" event={"ID":"57cdca52-f1f9-48c7-8fb6-9144a033c957","Type":"ContainerStarted","Data":"09bcb2c3b4d762269be55620bcfe049068a22a9a649ecba963dea520c2908581"} Sep 30 10:07:33 crc kubenswrapper[4730]: I0930 10:07:33.697995 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-7d48d7c7fd-7l8hx" event={"ID":"57cdca52-f1f9-48c7-8fb6-9144a033c957","Type":"ContainerStarted","Data":"de892ccaf32f038c122369573520be34a79d40b2465d968f72cf8de11ebbe1ad"} Sep 30 10:07:33 crc kubenswrapper[4730]: I0930 10:07:33.699586 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/watcher-decision-engine-0" Sep 30 10:07:33 crc kubenswrapper[4730]: I0930 10:07:33.716067 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-7dfc9d946-psk9c" Sep 30 10:07:33 crc kubenswrapper[4730]: I0930 10:07:33.727450 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-sync-cmvzl" podStartSLOduration=4.727431808 podStartE2EDuration="4.727431808s" podCreationTimestamp="2025-09-30 10:07:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 10:07:33.722272998 +0000 UTC m=+1098.055533001" watchObservedRunningTime="2025-09-30 10:07:33.727431808 +0000 UTC m=+1098.060691791" Sep 30 10:07:33 crc kubenswrapper[4730]: I0930 10:07:33.758397 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/watcher-applier-0" Sep 30 10:07:33 crc kubenswrapper[4730]: I0930 10:07:33.780068 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-7d48d7c7fd-7l8hx" podStartSLOduration=3.7794989230000002 podStartE2EDuration="3.779498923s" podCreationTimestamp="2025-09-30 10:07:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 10:07:33.778323243 +0000 UTC m=+1098.111583236" watchObservedRunningTime="2025-09-30 10:07:33.779498923 +0000 UTC m=+1098.112758916" Sep 30 10:07:33 crc kubenswrapper[4730]: I0930 10:07:33.811856 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/watcher-decision-engine-0" Sep 30 10:07:33 crc kubenswrapper[4730]: I0930 10:07:33.878876 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/watcher-api-0" Sep 30 10:07:34 crc kubenswrapper[4730]: I0930 10:07:34.466803 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openstack/keystone-7dfc9d946-psk9c"] Sep 30 10:07:34 crc kubenswrapper[4730]: I0930 10:07:34.708434 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-7dfc9d946-psk9c" event={"ID":"fdc671fd-529e-484c-9924-355c64d393ff","Type":"ContainerStarted","Data":"c919ad33737dcf7716d5d7fe8e29c0cd098fe8886d4d0a1ff4b65cfb850a1d6f"} Sep 30 10:07:34 crc kubenswrapper[4730]: I0930 10:07:34.709903 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-7d48d7c7fd-7l8hx" Sep 30 10:07:34 crc kubenswrapper[4730]: I0930 10:07:34.710289 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-7d48d7c7fd-7l8hx" Sep 30 10:07:35 crc kubenswrapper[4730]: I0930 10:07:35.717569 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-7dfc9d946-psk9c" event={"ID":"fdc671fd-529e-484c-9924-355c64d393ff","Type":"ContainerStarted","Data":"715fd1ba4af24c3c1e288ceba2c05ff33e59ec2fe335e14004b8b0dfe87c21db"} Sep 30 10:07:35 crc kubenswrapper[4730]: I0930 10:07:35.744852 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-7dfc9d946-psk9c" podStartSLOduration=2.744827172 podStartE2EDuration="2.744827172s" podCreationTimestamp="2025-09-30 10:07:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 10:07:35.73493904 +0000 UTC m=+1100.068199033" watchObservedRunningTime="2025-09-30 10:07:35.744827172 +0000 UTC m=+1100.078087165" Sep 30 10:07:36 crc kubenswrapper[4730]: I0930 10:07:36.142274 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/watcher-api-0"] Sep 30 10:07:36 crc kubenswrapper[4730]: I0930 10:07:36.143072 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/watcher-api-0" podUID="87b74547-1e89-46b3-82b2-64592d256309" containerName="watcher-api-log" containerID="cri-o://e09c064a4008a0cf1eb83b0ca3fc5ba062c21a679935086f11174c671e85cbe9" gracePeriod=30 Sep 30 10:07:36 crc kubenswrapper[4730]: I0930 10:07:36.143140 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/watcher-api-0" podUID="87b74547-1e89-46b3-82b2-64592d256309" containerName="watcher-api" containerID="cri-o://5f2cdf60e0c77cf0533a2dc4212470e5cc80d0361f080fa81815b8d897137ed2" gracePeriod=30 Sep 30 10:07:36 crc kubenswrapper[4730]: I0930 10:07:36.733738 4730 generic.go:334] "Generic (PLEG): container finished" podID="87b74547-1e89-46b3-82b2-64592d256309" containerID="e09c064a4008a0cf1eb83b0ca3fc5ba062c21a679935086f11174c671e85cbe9" exitCode=143 Sep 30 10:07:36 crc kubenswrapper[4730]: I0930 10:07:36.733783 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-api-0" event={"ID":"87b74547-1e89-46b3-82b2-64592d256309","Type":"ContainerDied","Data":"e09c064a4008a0cf1eb83b0ca3fc5ba062c21a679935086f11174c671e85cbe9"} Sep 30 10:07:36 crc kubenswrapper[4730]: I0930 10:07:36.734733 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-7dfc9d946-psk9c" Sep 30 10:07:37 crc kubenswrapper[4730]: I0930 10:07:37.754285 4730 generic.go:334] "Generic (PLEG): container finished" podID="a4f9bd21-5f86-4443-87be-eadb5d1c77f9" containerID="f221a9aa65cec1a8d6cd07c7fd83d5f118dd091b859e3d97e66657018b1a7af5" exitCode=1 Sep 30 10:07:37 crc kubenswrapper[4730]: I0930 10:07:37.754630 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/watcher-decision-engine-0" event={"ID":"a4f9bd21-5f86-4443-87be-eadb5d1c77f9","Type":"ContainerDied","Data":"f221a9aa65cec1a8d6cd07c7fd83d5f118dd091b859e3d97e66657018b1a7af5"} Sep 30 10:07:37 crc kubenswrapper[4730]: I0930 10:07:37.755437 4730 scope.go:117] "RemoveContainer" containerID="f221a9aa65cec1a8d6cd07c7fd83d5f118dd091b859e3d97e66657018b1a7af5" Sep 30 10:07:37 crc kubenswrapper[4730]: I0930 10:07:37.761054 4730 generic.go:334] "Generic (PLEG): container finished" podID="87b74547-1e89-46b3-82b2-64592d256309" containerID="5f2cdf60e0c77cf0533a2dc4212470e5cc80d0361f080fa81815b8d897137ed2" exitCode=0 Sep 30 10:07:37 crc kubenswrapper[4730]: I0930 10:07:37.761162 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-api-0" event={"ID":"87b74547-1e89-46b3-82b2-64592d256309","Type":"ContainerDied","Data":"5f2cdf60e0c77cf0533a2dc4212470e5cc80d0361f080fa81815b8d897137ed2"} Sep 30 10:07:37 crc kubenswrapper[4730]: I0930 10:07:37.765985 4730 generic.go:334] "Generic (PLEG): container finished" podID="7fc71401-9b3c-455c-9a56-28c6fcbde898" containerID="94569ff68ed078a7a7205f05e47b9f78a7ae562146af2648048462abbdf3cb11" exitCode=0 Sep 30 10:07:37 crc kubenswrapper[4730]: I0930 10:07:37.766064 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-fj5rw" event={"ID":"7fc71401-9b3c-455c-9a56-28c6fcbde898","Type":"ContainerDied","Data":"94569ff68ed078a7a7205f05e47b9f78a7ae562146af2648048462abbdf3cb11"} Sep 30 10:07:40 crc kubenswrapper[4730]: I0930 10:07:40.273067 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-api-0" Sep 30 10:07:40 crc kubenswrapper[4730]: I0930 10:07:40.433294 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/87b74547-1e89-46b3-82b2-64592d256309-logs\") pod \"87b74547-1e89-46b3-82b2-64592d256309\" (UID: \"87b74547-1e89-46b3-82b2-64592d256309\") " Sep 30 10:07:40 crc kubenswrapper[4730]: I0930 10:07:40.433561 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-687n4\" (UniqueName: \"kubernetes.io/projected/87b74547-1e89-46b3-82b2-64592d256309-kube-api-access-687n4\") pod \"87b74547-1e89-46b3-82b2-64592d256309\" (UID: \"87b74547-1e89-46b3-82b2-64592d256309\") " Sep 30 10:07:40 crc kubenswrapper[4730]: I0930 10:07:40.434038 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/87b74547-1e89-46b3-82b2-64592d256309-custom-prometheus-ca\") pod \"87b74547-1e89-46b3-82b2-64592d256309\" (UID: \"87b74547-1e89-46b3-82b2-64592d256309\") " Sep 30 10:07:40 crc kubenswrapper[4730]: I0930 10:07:40.434771 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/87b74547-1e89-46b3-82b2-64592d256309-combined-ca-bundle\") pod \"87b74547-1e89-46b3-82b2-64592d256309\" (UID: \"87b74547-1e89-46b3-82b2-64592d256309\") " Sep 30 10:07:40 crc kubenswrapper[4730]: I0930 10:07:40.434805 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/87b74547-1e89-46b3-82b2-64592d256309-config-data\") pod \"87b74547-1e89-46b3-82b2-64592d256309\" (UID: \"87b74547-1e89-46b3-82b2-64592d256309\") " Sep 30 10:07:40 crc kubenswrapper[4730]: I0930 10:07:40.440851 4730 operation_generator.go:803] UnmountVolume.TearDown 
succeeded for volume "kubernetes.io/empty-dir/87b74547-1e89-46b3-82b2-64592d256309-logs" (OuterVolumeSpecName: "logs") pod "87b74547-1e89-46b3-82b2-64592d256309" (UID: "87b74547-1e89-46b3-82b2-64592d256309"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 10:07:40 crc kubenswrapper[4730]: I0930 10:07:40.463252 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87b74547-1e89-46b3-82b2-64592d256309-custom-prometheus-ca" (OuterVolumeSpecName: "custom-prometheus-ca") pod "87b74547-1e89-46b3-82b2-64592d256309" (UID: "87b74547-1e89-46b3-82b2-64592d256309"). InnerVolumeSpecName "custom-prometheus-ca". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:07:40 crc kubenswrapper[4730]: I0930 10:07:40.464835 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87b74547-1e89-46b3-82b2-64592d256309-kube-api-access-687n4" (OuterVolumeSpecName: "kube-api-access-687n4") pod "87b74547-1e89-46b3-82b2-64592d256309" (UID: "87b74547-1e89-46b3-82b2-64592d256309"). InnerVolumeSpecName "kube-api-access-687n4". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:07:40 crc kubenswrapper[4730]: I0930 10:07:40.467844 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87b74547-1e89-46b3-82b2-64592d256309-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "87b74547-1e89-46b3-82b2-64592d256309" (UID: "87b74547-1e89-46b3-82b2-64592d256309"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:07:40 crc kubenswrapper[4730]: I0930 10:07:40.491598 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87b74547-1e89-46b3-82b2-64592d256309-config-data" (OuterVolumeSpecName: "config-data") pod "87b74547-1e89-46b3-82b2-64592d256309" (UID: "87b74547-1e89-46b3-82b2-64592d256309"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:07:40 crc kubenswrapper[4730]: I0930 10:07:40.539388 4730 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/87b74547-1e89-46b3-82b2-64592d256309-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 10:07:40 crc kubenswrapper[4730]: I0930 10:07:40.539424 4730 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/87b74547-1e89-46b3-82b2-64592d256309-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 10:07:40 crc kubenswrapper[4730]: I0930 10:07:40.539432 4730 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/87b74547-1e89-46b3-82b2-64592d256309-logs\") on node \"crc\" DevicePath \"\"" Sep 30 10:07:40 crc kubenswrapper[4730]: I0930 10:07:40.539441 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-687n4\" (UniqueName: \"kubernetes.io/projected/87b74547-1e89-46b3-82b2-64592d256309-kube-api-access-687n4\") on node \"crc\" DevicePath \"\"" Sep 30 10:07:40 crc kubenswrapper[4730]: I0930 10:07:40.539450 4730 reconciler_common.go:293] "Volume detached for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/87b74547-1e89-46b3-82b2-64592d256309-custom-prometheus-ca\") on node \"crc\" DevicePath \"\"" Sep 30 10:07:40 crc kubenswrapper[4730]: I0930 10:07:40.797083 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-api-0" event={"ID":"87b74547-1e89-46b3-82b2-64592d256309","Type":"ContainerDied","Data":"d0176f644173c3853e811e5dd424a2c014535e3f4bf4ac48e93eb87fd9755aa9"} Sep 30 10:07:40 crc kubenswrapper[4730]: I0930 10:07:40.797144 4730 scope.go:117] "RemoveContainer" containerID="5f2cdf60e0c77cf0533a2dc4212470e5cc80d0361f080fa81815b8d897137ed2" Sep 30 10:07:40 crc kubenswrapper[4730]: I0930 10:07:40.797171 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/watcher-api-0" Sep 30 10:07:40 crc kubenswrapper[4730]: I0930 10:07:40.830974 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/watcher-api-0"] Sep 30 10:07:40 crc kubenswrapper[4730]: I0930 10:07:40.843739 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/watcher-api-0"] Sep 30 10:07:40 crc kubenswrapper[4730]: I0930 10:07:40.859193 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/watcher-api-0"] Sep 30 10:07:40 crc kubenswrapper[4730]: E0930 10:07:40.859689 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="87b74547-1e89-46b3-82b2-64592d256309" containerName="watcher-api" Sep 30 10:07:40 crc kubenswrapper[4730]: I0930 10:07:40.859712 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="87b74547-1e89-46b3-82b2-64592d256309" containerName="watcher-api" Sep 30 10:07:40 crc kubenswrapper[4730]: E0930 10:07:40.859742 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="87b74547-1e89-46b3-82b2-64592d256309" containerName="watcher-api-log" Sep 30 10:07:40 crc kubenswrapper[4730]: I0930 10:07:40.859750 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="87b74547-1e89-46b3-82b2-64592d256309" containerName="watcher-api-log" Sep 30 10:07:40 crc kubenswrapper[4730]: I0930 10:07:40.859971 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="87b74547-1e89-46b3-82b2-64592d256309" containerName="watcher-api-log" Sep 30 10:07:40 crc kubenswrapper[4730]: I0930 10:07:40.859992 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="87b74547-1e89-46b3-82b2-64592d256309" containerName="watcher-api" Sep 30 10:07:40 crc kubenswrapper[4730]: I0930 10:07:40.861293 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/watcher-api-0" Sep 30 10:07:40 crc kubenswrapper[4730]: I0930 10:07:40.866276 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"watcher-api-config-data" Sep 30 10:07:40 crc kubenswrapper[4730]: I0930 10:07:40.866299 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-watcher-internal-svc" Sep 30 10:07:40 crc kubenswrapper[4730]: I0930 10:07:40.866929 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-watcher-public-svc" Sep 30 10:07:40 crc kubenswrapper[4730]: I0930 10:07:40.872202 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-api-0"] Sep 30 10:07:40 crc kubenswrapper[4730]: I0930 10:07:40.945193 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4caba7a4-0751-4410-88f1-084d5289d1c6-public-tls-certs\") pod \"watcher-api-0\" (UID: \"4caba7a4-0751-4410-88f1-084d5289d1c6\") " pod="openstack/watcher-api-0" Sep 30 10:07:40 crc kubenswrapper[4730]: I0930 10:07:40.945246 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4caba7a4-0751-4410-88f1-084d5289d1c6-config-data\") pod \"watcher-api-0\" (UID: \"4caba7a4-0751-4410-88f1-084d5289d1c6\") " pod="openstack/watcher-api-0" Sep 30 10:07:40 crc kubenswrapper[4730]: I0930 10:07:40.945303 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4caba7a4-0751-4410-88f1-084d5289d1c6-combined-ca-bundle\") pod \"watcher-api-0\" (UID: \"4caba7a4-0751-4410-88f1-084d5289d1c6\") " pod="openstack/watcher-api-0" Sep 30 10:07:40 crc kubenswrapper[4730]: I0930 10:07:40.945325 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dfpzb\" (UniqueName: \"kubernetes.io/projected/4caba7a4-0751-4410-88f1-084d5289d1c6-kube-api-access-dfpzb\") pod \"watcher-api-0\" (UID: \"4caba7a4-0751-4410-88f1-084d5289d1c6\") " pod="openstack/watcher-api-0" Sep 30 10:07:40 crc kubenswrapper[4730]: I0930 10:07:40.945345 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4caba7a4-0751-4410-88f1-084d5289d1c6-logs\") pod \"watcher-api-0\" (UID: \"4caba7a4-0751-4410-88f1-084d5289d1c6\") " pod="openstack/watcher-api-0" Sep 30 10:07:40 crc kubenswrapper[4730]: I0930 10:07:40.945406 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/4caba7a4-0751-4410-88f1-084d5289d1c6-custom-prometheus-ca\") pod \"watcher-api-0\" (UID: \"4caba7a4-0751-4410-88f1-084d5289d1c6\") " pod="openstack/watcher-api-0" Sep 30 10:07:40 crc kubenswrapper[4730]: I0930 10:07:40.945436 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4caba7a4-0751-4410-88f1-084d5289d1c6-internal-tls-certs\") pod \"watcher-api-0\" (UID: \"4caba7a4-0751-4410-88f1-084d5289d1c6\") " pod="openstack/watcher-api-0" Sep 30 10:07:41 crc kubenswrapper[4730]: I0930 10:07:41.046643 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/4caba7a4-0751-4410-88f1-084d5289d1c6-public-tls-certs\") pod \"watcher-api-0\" (UID: \"4caba7a4-0751-4410-88f1-084d5289d1c6\") " pod="openstack/watcher-api-0" Sep 30 10:07:41 crc kubenswrapper[4730]: I0930 10:07:41.046694 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4caba7a4-0751-4410-88f1-084d5289d1c6-config-data\") pod \"watcher-api-0\" (UID: \"4caba7a4-0751-4410-88f1-084d5289d1c6\") " pod="openstack/watcher-api-0" Sep 30 10:07:41 crc kubenswrapper[4730]: I0930 10:07:41.046732 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4caba7a4-0751-4410-88f1-084d5289d1c6-combined-ca-bundle\") pod \"watcher-api-0\" (UID: \"4caba7a4-0751-4410-88f1-084d5289d1c6\") " pod="openstack/watcher-api-0" Sep 30 10:07:41 crc kubenswrapper[4730]: I0930 10:07:41.046754 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dfpzb\" (UniqueName: \"kubernetes.io/projected/4caba7a4-0751-4410-88f1-084d5289d1c6-kube-api-access-dfpzb\") pod \"watcher-api-0\" (UID: \"4caba7a4-0751-4410-88f1-084d5289d1c6\") " pod="openstack/watcher-api-0" Sep 30 10:07:41 crc kubenswrapper[4730]: I0930 10:07:41.046772 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4caba7a4-0751-4410-88f1-084d5289d1c6-logs\") pod \"watcher-api-0\" (UID: \"4caba7a4-0751-4410-88f1-084d5289d1c6\") " pod="openstack/watcher-api-0" Sep 30 10:07:41 crc kubenswrapper[4730]: I0930 10:07:41.046806 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/4caba7a4-0751-4410-88f1-084d5289d1c6-custom-prometheus-ca\") pod \"watcher-api-0\" (UID: \"4caba7a4-0751-4410-88f1-084d5289d1c6\") " pod="openstack/watcher-api-0" Sep 30 10:07:41 crc kubenswrapper[4730]: I0930 10:07:41.046827 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4caba7a4-0751-4410-88f1-084d5289d1c6-internal-tls-certs\") pod \"watcher-api-0\" (UID: \"4caba7a4-0751-4410-88f1-084d5289d1c6\") " pod="openstack/watcher-api-0" Sep 30 10:07:41 crc kubenswrapper[4730]: I0930 10:07:41.047825 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4caba7a4-0751-4410-88f1-084d5289d1c6-logs\") pod \"watcher-api-0\" (UID: \"4caba7a4-0751-4410-88f1-084d5289d1c6\") " pod="openstack/watcher-api-0" Sep 30 10:07:41 crc kubenswrapper[4730]: I0930 10:07:41.050992 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4caba7a4-0751-4410-88f1-084d5289d1c6-internal-tls-certs\") pod \"watcher-api-0\" (UID: \"4caba7a4-0751-4410-88f1-084d5289d1c6\") " pod="openstack/watcher-api-0" Sep 30 10:07:41 crc kubenswrapper[4730]: I0930 10:07:41.051028 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4caba7a4-0751-4410-88f1-084d5289d1c6-public-tls-certs\") pod \"watcher-api-0\" (UID: \"4caba7a4-0751-4410-88f1-084d5289d1c6\") " pod="openstack/watcher-api-0" Sep 30 10:07:41 crc kubenswrapper[4730]: I0930 10:07:41.051448 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/4caba7a4-0751-4410-88f1-084d5289d1c6-config-data\") pod \"watcher-api-0\" (UID: \"4caba7a4-0751-4410-88f1-084d5289d1c6\") " pod="openstack/watcher-api-0" Sep 30 10:07:41 crc kubenswrapper[4730]: I0930 10:07:41.053274 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4caba7a4-0751-4410-88f1-084d5289d1c6-combined-ca-bundle\") pod \"watcher-api-0\" (UID: \"4caba7a4-0751-4410-88f1-084d5289d1c6\") " pod="openstack/watcher-api-0" Sep 30 10:07:41 crc kubenswrapper[4730]: I0930 10:07:41.061499 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/4caba7a4-0751-4410-88f1-084d5289d1c6-custom-prometheus-ca\") pod \"watcher-api-0\" (UID: \"4caba7a4-0751-4410-88f1-084d5289d1c6\") " pod="openstack/watcher-api-0" Sep 30 10:07:41 crc kubenswrapper[4730]: I0930 10:07:41.066291 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dfpzb\" (UniqueName: \"kubernetes.io/projected/4caba7a4-0751-4410-88f1-084d5289d1c6-kube-api-access-dfpzb\") pod \"watcher-api-0\" (UID: \"4caba7a4-0751-4410-88f1-084d5289d1c6\") " pod="openstack/watcher-api-0" Sep 30 10:07:41 crc kubenswrapper[4730]: I0930 10:07:41.192398 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/watcher-api-0" Sep 30 10:07:41 crc kubenswrapper[4730]: I0930 10:07:41.713576 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-fj5rw" Sep 30 10:07:41 crc kubenswrapper[4730]: I0930 10:07:41.805986 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-fj5rw" event={"ID":"7fc71401-9b3c-455c-9a56-28c6fcbde898","Type":"ContainerDied","Data":"87f4815ad181706f267a70cc48afa9e853a8807787681388792bed2f60778276"} Sep 30 10:07:41 crc kubenswrapper[4730]: I0930 10:07:41.806031 4730 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="87f4815ad181706f267a70cc48afa9e853a8807787681388792bed2f60778276" Sep 30 10:07:41 crc kubenswrapper[4730]: I0930 10:07:41.806090 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-fj5rw" Sep 30 10:07:41 crc kubenswrapper[4730]: I0930 10:07:41.858896 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nk94d\" (UniqueName: \"kubernetes.io/projected/7fc71401-9b3c-455c-9a56-28c6fcbde898-kube-api-access-nk94d\") pod \"7fc71401-9b3c-455c-9a56-28c6fcbde898\" (UID: \"7fc71401-9b3c-455c-9a56-28c6fcbde898\") " Sep 30 10:07:41 crc kubenswrapper[4730]: I0930 10:07:41.859016 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7fc71401-9b3c-455c-9a56-28c6fcbde898-combined-ca-bundle\") pod \"7fc71401-9b3c-455c-9a56-28c6fcbde898\" (UID: \"7fc71401-9b3c-455c-9a56-28c6fcbde898\") " Sep 30 10:07:41 crc kubenswrapper[4730]: I0930 10:07:41.859201 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/7fc71401-9b3c-455c-9a56-28c6fcbde898-db-sync-config-data\") pod \"7fc71401-9b3c-455c-9a56-28c6fcbde898\" (UID: \"7fc71401-9b3c-455c-9a56-28c6fcbde898\") " Sep 30 10:07:41 crc kubenswrapper[4730]: I0930 10:07:41.864698 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7fc71401-9b3c-455c-9a56-28c6fcbde898-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "7fc71401-9b3c-455c-9a56-28c6fcbde898" (UID: "7fc71401-9b3c-455c-9a56-28c6fcbde898"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:07:41 crc kubenswrapper[4730]: I0930 10:07:41.865249 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7fc71401-9b3c-455c-9a56-28c6fcbde898-kube-api-access-nk94d" (OuterVolumeSpecName: "kube-api-access-nk94d") pod "7fc71401-9b3c-455c-9a56-28c6fcbde898" (UID: "7fc71401-9b3c-455c-9a56-28c6fcbde898"). InnerVolumeSpecName "kube-api-access-nk94d". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:07:41 crc kubenswrapper[4730]: I0930 10:07:41.905702 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7fc71401-9b3c-455c-9a56-28c6fcbde898-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7fc71401-9b3c-455c-9a56-28c6fcbde898" (UID: "7fc71401-9b3c-455c-9a56-28c6fcbde898"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:07:41 crc kubenswrapper[4730]: I0930 10:07:41.960870 4730 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7fc71401-9b3c-455c-9a56-28c6fcbde898-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 10:07:41 crc kubenswrapper[4730]: I0930 10:07:41.960900 4730 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/7fc71401-9b3c-455c-9a56-28c6fcbde898-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 10:07:41 crc kubenswrapper[4730]: I0930 10:07:41.960909 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nk94d\" (UniqueName: \"kubernetes.io/projected/7fc71401-9b3c-455c-9a56-28c6fcbde898-kube-api-access-nk94d\") on node \"crc\" DevicePath \"\"" Sep 30 10:07:42 crc kubenswrapper[4730]: I0930 10:07:42.392050 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87b74547-1e89-46b3-82b2-64592d256309" path="/var/lib/kubelet/pods/87b74547-1e89-46b3-82b2-64592d256309/volumes" Sep 30 10:07:42 crc kubenswrapper[4730]: I0930 10:07:42.927173 4730 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/watcher-api-0" podUID="87b74547-1e89-46b3-82b2-64592d256309" containerName="watcher-api-log" probeResult="failure" output="Get \"http://10.217.0.151:9322/\": dial tcp 10.217.0.151:9322: i/o timeout (Client.Timeout exceeded while awaiting headers)" Sep 30 10:07:42 crc kubenswrapper[4730]: I0930 10:07:42.927886 4730 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/watcher-api-0" podUID="87b74547-1e89-46b3-82b2-64592d256309" containerName="watcher-api" probeResult="failure" output="Get \"http://10.217.0.151:9322/\": dial tcp 10.217.0.151:9322: i/o timeout (Client.Timeout exceeded while awaiting headers)" Sep 30 10:07:42 crc kubenswrapper[4730]: I0930 10:07:42.997011 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/watcher-decision-engine-0" Sep 30 10:07:43 crc kubenswrapper[4730]: I0930 10:07:43.007661 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-665f7f89df-b6qmw"] Sep 30 10:07:43 crc kubenswrapper[4730]: E0930 10:07:43.008037 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7fc71401-9b3c-455c-9a56-28c6fcbde898" containerName="barbican-db-sync" Sep 30 10:07:43 crc kubenswrapper[4730]: I0930 10:07:43.008054 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="7fc71401-9b3c-455c-9a56-28c6fcbde898" containerName="barbican-db-sync" Sep 30 10:07:43 crc kubenswrapper[4730]: I0930 10:07:43.008240 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="7fc71401-9b3c-455c-9a56-28c6fcbde898" containerName="barbican-db-sync" Sep 30 10:07:43 crc kubenswrapper[4730]: I0930 10:07:43.009192 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-worker-665f7f89df-b6qmw" Sep 30 10:07:43 crc kubenswrapper[4730]: I0930 10:07:43.013543 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-zmnk5" Sep 30 10:07:43 crc kubenswrapper[4730]: I0930 10:07:43.013762 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Sep 30 10:07:43 crc kubenswrapper[4730]: I0930 10:07:43.013875 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data" Sep 30 10:07:43 crc kubenswrapper[4730]: I0930 10:07:43.025862 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-6cdddb844-vsgsp"] Sep 30 10:07:43 crc kubenswrapper[4730]: I0930 10:07:43.027407 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-6cdddb844-vsgsp" Sep 30 10:07:43 crc kubenswrapper[4730]: I0930 10:07:43.034023 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data" Sep 30 10:07:43 crc kubenswrapper[4730]: I0930 10:07:43.047755 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-665f7f89df-b6qmw"] Sep 30 10:07:43 crc kubenswrapper[4730]: I0930 10:07:43.078254 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-6cdddb844-vsgsp"] Sep 30 10:07:43 crc kubenswrapper[4730]: I0930 10:07:43.080998 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/87696e9f-ed08-459f-80b7-c4c5499e4157-combined-ca-bundle\") pod \"barbican-worker-665f7f89df-b6qmw\" (UID: \"87696e9f-ed08-459f-80b7-c4c5499e4157\") " pod="openstack/barbican-worker-665f7f89df-b6qmw" Sep 30 10:07:43 crc kubenswrapper[4730]: I0930 10:07:43.081041 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-84zbv\" (UniqueName: \"kubernetes.io/projected/1018e1cd-c432-45b3-8267-0f37607cff2f-kube-api-access-84zbv\") pod \"barbican-keystone-listener-6cdddb844-vsgsp\" (UID: \"1018e1cd-c432-45b3-8267-0f37607cff2f\") " pod="openstack/barbican-keystone-listener-6cdddb844-vsgsp" Sep 30 10:07:43 crc kubenswrapper[4730]: I0930 10:07:43.081062 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/87696e9f-ed08-459f-80b7-c4c5499e4157-logs\") pod \"barbican-worker-665f7f89df-b6qmw\" (UID: \"87696e9f-ed08-459f-80b7-c4c5499e4157\") " pod="openstack/barbican-worker-665f7f89df-b6qmw" Sep 30 10:07:43 crc kubenswrapper[4730]: I0930 10:07:43.081091 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/87696e9f-ed08-459f-80b7-c4c5499e4157-config-data\") pod \"barbican-worker-665f7f89df-b6qmw\" (UID: \"87696e9f-ed08-459f-80b7-c4c5499e4157\") " pod="openstack/barbican-worker-665f7f89df-b6qmw" Sep 30 10:07:43 crc kubenswrapper[4730]: I0930 10:07:43.081112 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1018e1cd-c432-45b3-8267-0f37607cff2f-config-data-custom\") pod \"barbican-keystone-listener-6cdddb844-vsgsp\" (UID: \"1018e1cd-c432-45b3-8267-0f37607cff2f\") " 
pod="openstack/barbican-keystone-listener-6cdddb844-vsgsp" Sep 30 10:07:43 crc kubenswrapper[4730]: I0930 10:07:43.081150 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v9tfx\" (UniqueName: \"kubernetes.io/projected/87696e9f-ed08-459f-80b7-c4c5499e4157-kube-api-access-v9tfx\") pod \"barbican-worker-665f7f89df-b6qmw\" (UID: \"87696e9f-ed08-459f-80b7-c4c5499e4157\") " pod="openstack/barbican-worker-665f7f89df-b6qmw" Sep 30 10:07:43 crc kubenswrapper[4730]: I0930 10:07:43.081188 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1018e1cd-c432-45b3-8267-0f37607cff2f-config-data\") pod \"barbican-keystone-listener-6cdddb844-vsgsp\" (UID: \"1018e1cd-c432-45b3-8267-0f37607cff2f\") " pod="openstack/barbican-keystone-listener-6cdddb844-vsgsp" Sep 30 10:07:43 crc kubenswrapper[4730]: I0930 10:07:43.081204 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1018e1cd-c432-45b3-8267-0f37607cff2f-combined-ca-bundle\") pod \"barbican-keystone-listener-6cdddb844-vsgsp\" (UID: \"1018e1cd-c432-45b3-8267-0f37607cff2f\") " pod="openstack/barbican-keystone-listener-6cdddb844-vsgsp" Sep 30 10:07:43 crc kubenswrapper[4730]: I0930 10:07:43.081233 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/87696e9f-ed08-459f-80b7-c4c5499e4157-config-data-custom\") pod \"barbican-worker-665f7f89df-b6qmw\" (UID: \"87696e9f-ed08-459f-80b7-c4c5499e4157\") " pod="openstack/barbican-worker-665f7f89df-b6qmw" Sep 30 10:07:43 crc kubenswrapper[4730]: I0930 10:07:43.081270 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1018e1cd-c432-45b3-8267-0f37607cff2f-logs\") pod \"barbican-keystone-listener-6cdddb844-vsgsp\" (UID: \"1018e1cd-c432-45b3-8267-0f37607cff2f\") " pod="openstack/barbican-keystone-listener-6cdddb844-vsgsp" Sep 30 10:07:43 crc kubenswrapper[4730]: I0930 10:07:43.135848 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-8469d44f79-gv4w7"] Sep 30 10:07:43 crc kubenswrapper[4730]: I0930 10:07:43.137625 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-8469d44f79-gv4w7" Sep 30 10:07:43 crc kubenswrapper[4730]: I0930 10:07:43.148255 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8469d44f79-gv4w7"] Sep 30 10:07:43 crc kubenswrapper[4730]: I0930 10:07:43.182134 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/87696e9f-ed08-459f-80b7-c4c5499e4157-combined-ca-bundle\") pod \"barbican-worker-665f7f89df-b6qmw\" (UID: \"87696e9f-ed08-459f-80b7-c4c5499e4157\") " pod="openstack/barbican-worker-665f7f89df-b6qmw" Sep 30 10:07:43 crc kubenswrapper[4730]: I0930 10:07:43.182185 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-84zbv\" (UniqueName: \"kubernetes.io/projected/1018e1cd-c432-45b3-8267-0f37607cff2f-kube-api-access-84zbv\") pod \"barbican-keystone-listener-6cdddb844-vsgsp\" (UID: \"1018e1cd-c432-45b3-8267-0f37607cff2f\") " pod="openstack/barbican-keystone-listener-6cdddb844-vsgsp" Sep 30 10:07:43 crc kubenswrapper[4730]: I0930 10:07:43.182205 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/87696e9f-ed08-459f-80b7-c4c5499e4157-logs\") pod \"barbican-worker-665f7f89df-b6qmw\" (UID: \"87696e9f-ed08-459f-80b7-c4c5499e4157\") " pod="openstack/barbican-worker-665f7f89df-b6qmw" Sep 30 10:07:43 crc kubenswrapper[4730]: I0930 10:07:43.182226 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ec73b184-a60f-4477-8988-b9f6d937316c-config\") pod \"dnsmasq-dns-8469d44f79-gv4w7\" (UID: \"ec73b184-a60f-4477-8988-b9f6d937316c\") " pod="openstack/dnsmasq-dns-8469d44f79-gv4w7" Sep 30 10:07:43 crc kubenswrapper[4730]: I0930 10:07:43.182257 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/87696e9f-ed08-459f-80b7-c4c5499e4157-config-data\") pod \"barbican-worker-665f7f89df-b6qmw\" (UID: \"87696e9f-ed08-459f-80b7-c4c5499e4157\") " pod="openstack/barbican-worker-665f7f89df-b6qmw" Sep 30 10:07:43 crc kubenswrapper[4730]: I0930 10:07:43.182290 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1018e1cd-c432-45b3-8267-0f37607cff2f-config-data-custom\") pod \"barbican-keystone-listener-6cdddb844-vsgsp\" (UID: \"1018e1cd-c432-45b3-8267-0f37607cff2f\") " pod="openstack/barbican-keystone-listener-6cdddb844-vsgsp" Sep 30 10:07:43 crc kubenswrapper[4730]: I0930 10:07:43.182333 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v9tfx\" (UniqueName: \"kubernetes.io/projected/87696e9f-ed08-459f-80b7-c4c5499e4157-kube-api-access-v9tfx\") pod \"barbican-worker-665f7f89df-b6qmw\" (UID: \"87696e9f-ed08-459f-80b7-c4c5499e4157\") " pod="openstack/barbican-worker-665f7f89df-b6qmw" Sep 30 10:07:43 crc kubenswrapper[4730]: I0930 10:07:43.182358 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ec73b184-a60f-4477-8988-b9f6d937316c-ovsdbserver-sb\") pod \"dnsmasq-dns-8469d44f79-gv4w7\" (UID: \"ec73b184-a60f-4477-8988-b9f6d937316c\") " pod="openstack/dnsmasq-dns-8469d44f79-gv4w7" Sep 30 10:07:43 crc kubenswrapper[4730]: I0930 10:07:43.182395 4730 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ec73b184-a60f-4477-8988-b9f6d937316c-dns-svc\") pod \"dnsmasq-dns-8469d44f79-gv4w7\" (UID: \"ec73b184-a60f-4477-8988-b9f6d937316c\") " pod="openstack/dnsmasq-dns-8469d44f79-gv4w7" Sep 30 10:07:43 crc kubenswrapper[4730]: I0930 10:07:43.182418 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ec73b184-a60f-4477-8988-b9f6d937316c-ovsdbserver-nb\") pod \"dnsmasq-dns-8469d44f79-gv4w7\" (UID: \"ec73b184-a60f-4477-8988-b9f6d937316c\") " pod="openstack/dnsmasq-dns-8469d44f79-gv4w7" Sep 30 10:07:43 crc kubenswrapper[4730]: I0930 10:07:43.182449 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1018e1cd-c432-45b3-8267-0f37607cff2f-combined-ca-bundle\") pod \"barbican-keystone-listener-6cdddb844-vsgsp\" (UID: \"1018e1cd-c432-45b3-8267-0f37607cff2f\") " pod="openstack/barbican-keystone-listener-6cdddb844-vsgsp" Sep 30 10:07:43 crc kubenswrapper[4730]: I0930 10:07:43.182471 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1018e1cd-c432-45b3-8267-0f37607cff2f-config-data\") pod \"barbican-keystone-listener-6cdddb844-vsgsp\" (UID: \"1018e1cd-c432-45b3-8267-0f37607cff2f\") " pod="openstack/barbican-keystone-listener-6cdddb844-vsgsp" Sep 30 10:07:43 crc kubenswrapper[4730]: I0930 10:07:43.182505 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/87696e9f-ed08-459f-80b7-c4c5499e4157-config-data-custom\") pod \"barbican-worker-665f7f89df-b6qmw\" (UID: \"87696e9f-ed08-459f-80b7-c4c5499e4157\") " pod="openstack/barbican-worker-665f7f89df-b6qmw" Sep 30 10:07:43 crc kubenswrapper[4730]: I0930 10:07:43.182536 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j8m87\" (UniqueName: \"kubernetes.io/projected/ec73b184-a60f-4477-8988-b9f6d937316c-kube-api-access-j8m87\") pod \"dnsmasq-dns-8469d44f79-gv4w7\" (UID: \"ec73b184-a60f-4477-8988-b9f6d937316c\") " pod="openstack/dnsmasq-dns-8469d44f79-gv4w7" Sep 30 10:07:43 crc kubenswrapper[4730]: I0930 10:07:43.182577 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1018e1cd-c432-45b3-8267-0f37607cff2f-logs\") pod \"barbican-keystone-listener-6cdddb844-vsgsp\" (UID: \"1018e1cd-c432-45b3-8267-0f37607cff2f\") " pod="openstack/barbican-keystone-listener-6cdddb844-vsgsp" Sep 30 10:07:43 crc kubenswrapper[4730]: I0930 10:07:43.182791 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/87696e9f-ed08-459f-80b7-c4c5499e4157-logs\") pod \"barbican-worker-665f7f89df-b6qmw\" (UID: \"87696e9f-ed08-459f-80b7-c4c5499e4157\") " pod="openstack/barbican-worker-665f7f89df-b6qmw" Sep 30 10:07:43 crc kubenswrapper[4730]: I0930 10:07:43.182937 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1018e1cd-c432-45b3-8267-0f37607cff2f-logs\") pod \"barbican-keystone-listener-6cdddb844-vsgsp\" (UID: \"1018e1cd-c432-45b3-8267-0f37607cff2f\") " pod="openstack/barbican-keystone-listener-6cdddb844-vsgsp" Sep 30 
10:07:43 crc kubenswrapper[4730]: I0930 10:07:43.188361 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1018e1cd-c432-45b3-8267-0f37607cff2f-config-data\") pod \"barbican-keystone-listener-6cdddb844-vsgsp\" (UID: \"1018e1cd-c432-45b3-8267-0f37607cff2f\") " pod="openstack/barbican-keystone-listener-6cdddb844-vsgsp" Sep 30 10:07:43 crc kubenswrapper[4730]: I0930 10:07:43.191344 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/87696e9f-ed08-459f-80b7-c4c5499e4157-config-data\") pod \"barbican-worker-665f7f89df-b6qmw\" (UID: \"87696e9f-ed08-459f-80b7-c4c5499e4157\") " pod="openstack/barbican-worker-665f7f89df-b6qmw" Sep 30 10:07:43 crc kubenswrapper[4730]: I0930 10:07:43.202448 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/87696e9f-ed08-459f-80b7-c4c5499e4157-config-data-custom\") pod \"barbican-worker-665f7f89df-b6qmw\" (UID: \"87696e9f-ed08-459f-80b7-c4c5499e4157\") " pod="openstack/barbican-worker-665f7f89df-b6qmw" Sep 30 10:07:43 crc kubenswrapper[4730]: I0930 10:07:43.204247 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v9tfx\" (UniqueName: \"kubernetes.io/projected/87696e9f-ed08-459f-80b7-c4c5499e4157-kube-api-access-v9tfx\") pod \"barbican-worker-665f7f89df-b6qmw\" (UID: \"87696e9f-ed08-459f-80b7-c4c5499e4157\") " pod="openstack/barbican-worker-665f7f89df-b6qmw" Sep 30 10:07:43 crc kubenswrapper[4730]: I0930 10:07:43.214554 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-69f567c75d-8tg9t"] Sep 30 10:07:43 crc kubenswrapper[4730]: I0930 10:07:43.216041 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-69f567c75d-8tg9t" Sep 30 10:07:43 crc kubenswrapper[4730]: I0930 10:07:43.217585 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/87696e9f-ed08-459f-80b7-c4c5499e4157-combined-ca-bundle\") pod \"barbican-worker-665f7f89df-b6qmw\" (UID: \"87696e9f-ed08-459f-80b7-c4c5499e4157\") " pod="openstack/barbican-worker-665f7f89df-b6qmw" Sep 30 10:07:43 crc kubenswrapper[4730]: I0930 10:07:43.218157 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1018e1cd-c432-45b3-8267-0f37607cff2f-config-data-custom\") pod \"barbican-keystone-listener-6cdddb844-vsgsp\" (UID: \"1018e1cd-c432-45b3-8267-0f37607cff2f\") " pod="openstack/barbican-keystone-listener-6cdddb844-vsgsp" Sep 30 10:07:43 crc kubenswrapper[4730]: I0930 10:07:43.218392 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-api-config-data" Sep 30 10:07:43 crc kubenswrapper[4730]: I0930 10:07:43.222643 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-84zbv\" (UniqueName: \"kubernetes.io/projected/1018e1cd-c432-45b3-8267-0f37607cff2f-kube-api-access-84zbv\") pod \"barbican-keystone-listener-6cdddb844-vsgsp\" (UID: \"1018e1cd-c432-45b3-8267-0f37607cff2f\") " pod="openstack/barbican-keystone-listener-6cdddb844-vsgsp" Sep 30 10:07:43 crc kubenswrapper[4730]: I0930 10:07:43.223053 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1018e1cd-c432-45b3-8267-0f37607cff2f-combined-ca-bundle\") pod \"barbican-keystone-listener-6cdddb844-vsgsp\" (UID: \"1018e1cd-c432-45b3-8267-0f37607cff2f\") " pod="openstack/barbican-keystone-listener-6cdddb844-vsgsp" Sep 30 10:07:43 crc kubenswrapper[4730]: I0930 10:07:43.246289 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-69f567c75d-8tg9t"] Sep 30 10:07:43 crc kubenswrapper[4730]: I0930 10:07:43.283780 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ec73b184-a60f-4477-8988-b9f6d937316c-config\") pod \"dnsmasq-dns-8469d44f79-gv4w7\" (UID: \"ec73b184-a60f-4477-8988-b9f6d937316c\") " pod="openstack/dnsmasq-dns-8469d44f79-gv4w7" Sep 30 10:07:43 crc kubenswrapper[4730]: I0930 10:07:43.283853 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ec73b184-a60f-4477-8988-b9f6d937316c-ovsdbserver-sb\") pod \"dnsmasq-dns-8469d44f79-gv4w7\" (UID: \"ec73b184-a60f-4477-8988-b9f6d937316c\") " pod="openstack/dnsmasq-dns-8469d44f79-gv4w7" Sep 30 10:07:43 crc kubenswrapper[4730]: I0930 10:07:43.283877 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ec73b184-a60f-4477-8988-b9f6d937316c-dns-svc\") pod \"dnsmasq-dns-8469d44f79-gv4w7\" (UID: \"ec73b184-a60f-4477-8988-b9f6d937316c\") " pod="openstack/dnsmasq-dns-8469d44f79-gv4w7" Sep 30 10:07:43 crc kubenswrapper[4730]: I0930 10:07:43.283907 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ec73b184-a60f-4477-8988-b9f6d937316c-ovsdbserver-nb\") pod \"dnsmasq-dns-8469d44f79-gv4w7\" (UID: \"ec73b184-a60f-4477-8988-b9f6d937316c\") " 
pod="openstack/dnsmasq-dns-8469d44f79-gv4w7" Sep 30 10:07:43 crc kubenswrapper[4730]: I0930 10:07:43.283954 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j8m87\" (UniqueName: \"kubernetes.io/projected/ec73b184-a60f-4477-8988-b9f6d937316c-kube-api-access-j8m87\") pod \"dnsmasq-dns-8469d44f79-gv4w7\" (UID: \"ec73b184-a60f-4477-8988-b9f6d937316c\") " pod="openstack/dnsmasq-dns-8469d44f79-gv4w7" Sep 30 10:07:43 crc kubenswrapper[4730]: I0930 10:07:43.285101 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ec73b184-a60f-4477-8988-b9f6d937316c-ovsdbserver-nb\") pod \"dnsmasq-dns-8469d44f79-gv4w7\" (UID: \"ec73b184-a60f-4477-8988-b9f6d937316c\") " pod="openstack/dnsmasq-dns-8469d44f79-gv4w7" Sep 30 10:07:43 crc kubenswrapper[4730]: I0930 10:07:43.285451 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ec73b184-a60f-4477-8988-b9f6d937316c-dns-svc\") pod \"dnsmasq-dns-8469d44f79-gv4w7\" (UID: \"ec73b184-a60f-4477-8988-b9f6d937316c\") " pod="openstack/dnsmasq-dns-8469d44f79-gv4w7" Sep 30 10:07:43 crc kubenswrapper[4730]: I0930 10:07:43.285764 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ec73b184-a60f-4477-8988-b9f6d937316c-config\") pod \"dnsmasq-dns-8469d44f79-gv4w7\" (UID: \"ec73b184-a60f-4477-8988-b9f6d937316c\") " pod="openstack/dnsmasq-dns-8469d44f79-gv4w7" Sep 30 10:07:43 crc kubenswrapper[4730]: I0930 10:07:43.285902 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ec73b184-a60f-4477-8988-b9f6d937316c-ovsdbserver-sb\") pod \"dnsmasq-dns-8469d44f79-gv4w7\" (UID: \"ec73b184-a60f-4477-8988-b9f6d937316c\") " pod="openstack/dnsmasq-dns-8469d44f79-gv4w7" Sep 30 10:07:43 crc kubenswrapper[4730]: I0930 10:07:43.301672 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j8m87\" (UniqueName: \"kubernetes.io/projected/ec73b184-a60f-4477-8988-b9f6d937316c-kube-api-access-j8m87\") pod \"dnsmasq-dns-8469d44f79-gv4w7\" (UID: \"ec73b184-a60f-4477-8988-b9f6d937316c\") " pod="openstack/dnsmasq-dns-8469d44f79-gv4w7" Sep 30 10:07:43 crc kubenswrapper[4730]: I0930 10:07:43.363036 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-665f7f89df-b6qmw" Sep 30 10:07:43 crc kubenswrapper[4730]: I0930 10:07:43.385001 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-keystone-listener-6cdddb844-vsgsp" Sep 30 10:07:43 crc kubenswrapper[4730]: I0930 10:07:43.385946 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6mfjw\" (UniqueName: \"kubernetes.io/projected/a5cb84ea-d720-4c98-a515-e28490dd2af4-kube-api-access-6mfjw\") pod \"barbican-api-69f567c75d-8tg9t\" (UID: \"a5cb84ea-d720-4c98-a515-e28490dd2af4\") " pod="openstack/barbican-api-69f567c75d-8tg9t" Sep 30 10:07:43 crc kubenswrapper[4730]: I0930 10:07:43.386028 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a5cb84ea-d720-4c98-a515-e28490dd2af4-combined-ca-bundle\") pod \"barbican-api-69f567c75d-8tg9t\" (UID: \"a5cb84ea-d720-4c98-a515-e28490dd2af4\") " pod="openstack/barbican-api-69f567c75d-8tg9t" Sep 30 10:07:43 crc kubenswrapper[4730]: I0930 10:07:43.387151 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a5cb84ea-d720-4c98-a515-e28490dd2af4-config-data-custom\") pod \"barbican-api-69f567c75d-8tg9t\" (UID: \"a5cb84ea-d720-4c98-a515-e28490dd2af4\") " pod="openstack/barbican-api-69f567c75d-8tg9t" Sep 30 10:07:43 crc kubenswrapper[4730]: I0930 10:07:43.387247 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a5cb84ea-d720-4c98-a515-e28490dd2af4-config-data\") pod \"barbican-api-69f567c75d-8tg9t\" (UID: \"a5cb84ea-d720-4c98-a515-e28490dd2af4\") " pod="openstack/barbican-api-69f567c75d-8tg9t" Sep 30 10:07:43 crc kubenswrapper[4730]: I0930 10:07:43.387271 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a5cb84ea-d720-4c98-a515-e28490dd2af4-logs\") pod \"barbican-api-69f567c75d-8tg9t\" (UID: \"a5cb84ea-d720-4c98-a515-e28490dd2af4\") " pod="openstack/barbican-api-69f567c75d-8tg9t" Sep 30 10:07:43 crc kubenswrapper[4730]: I0930 10:07:43.464067 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-8469d44f79-gv4w7" Sep 30 10:07:43 crc kubenswrapper[4730]: I0930 10:07:43.489372 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6mfjw\" (UniqueName: \"kubernetes.io/projected/a5cb84ea-d720-4c98-a515-e28490dd2af4-kube-api-access-6mfjw\") pod \"barbican-api-69f567c75d-8tg9t\" (UID: \"a5cb84ea-d720-4c98-a515-e28490dd2af4\") " pod="openstack/barbican-api-69f567c75d-8tg9t" Sep 30 10:07:43 crc kubenswrapper[4730]: I0930 10:07:43.489450 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a5cb84ea-d720-4c98-a515-e28490dd2af4-combined-ca-bundle\") pod \"barbican-api-69f567c75d-8tg9t\" (UID: \"a5cb84ea-d720-4c98-a515-e28490dd2af4\") " pod="openstack/barbican-api-69f567c75d-8tg9t" Sep 30 10:07:43 crc kubenswrapper[4730]: I0930 10:07:43.489521 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a5cb84ea-d720-4c98-a515-e28490dd2af4-config-data-custom\") pod \"barbican-api-69f567c75d-8tg9t\" (UID: \"a5cb84ea-d720-4c98-a515-e28490dd2af4\") " pod="openstack/barbican-api-69f567c75d-8tg9t" Sep 30 10:07:43 crc kubenswrapper[4730]: I0930 10:07:43.489555 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a5cb84ea-d720-4c98-a515-e28490dd2af4-config-data\") pod \"barbican-api-69f567c75d-8tg9t\" (UID: \"a5cb84ea-d720-4c98-a515-e28490dd2af4\") " pod="openstack/barbican-api-69f567c75d-8tg9t" Sep 30 10:07:43 crc kubenswrapper[4730]: I0930 10:07:43.489570 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a5cb84ea-d720-4c98-a515-e28490dd2af4-logs\") pod \"barbican-api-69f567c75d-8tg9t\" (UID: \"a5cb84ea-d720-4c98-a515-e28490dd2af4\") " pod="openstack/barbican-api-69f567c75d-8tg9t" Sep 30 10:07:43 crc kubenswrapper[4730]: I0930 10:07:43.518521 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a5cb84ea-d720-4c98-a515-e28490dd2af4-logs\") pod \"barbican-api-69f567c75d-8tg9t\" (UID: \"a5cb84ea-d720-4c98-a515-e28490dd2af4\") " pod="openstack/barbican-api-69f567c75d-8tg9t" Sep 30 10:07:43 crc kubenswrapper[4730]: I0930 10:07:43.523197 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6mfjw\" (UniqueName: \"kubernetes.io/projected/a5cb84ea-d720-4c98-a515-e28490dd2af4-kube-api-access-6mfjw\") pod \"barbican-api-69f567c75d-8tg9t\" (UID: \"a5cb84ea-d720-4c98-a515-e28490dd2af4\") " pod="openstack/barbican-api-69f567c75d-8tg9t" Sep 30 10:07:43 crc kubenswrapper[4730]: I0930 10:07:43.525201 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a5cb84ea-d720-4c98-a515-e28490dd2af4-config-data-custom\") pod \"barbican-api-69f567c75d-8tg9t\" (UID: \"a5cb84ea-d720-4c98-a515-e28490dd2af4\") " pod="openstack/barbican-api-69f567c75d-8tg9t" Sep 30 10:07:43 crc kubenswrapper[4730]: I0930 10:07:43.526647 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a5cb84ea-d720-4c98-a515-e28490dd2af4-combined-ca-bundle\") pod \"barbican-api-69f567c75d-8tg9t\" (UID: \"a5cb84ea-d720-4c98-a515-e28490dd2af4\") " 
pod="openstack/barbican-api-69f567c75d-8tg9t" Sep 30 10:07:43 crc kubenswrapper[4730]: I0930 10:07:43.526813 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a5cb84ea-d720-4c98-a515-e28490dd2af4-config-data\") pod \"barbican-api-69f567c75d-8tg9t\" (UID: \"a5cb84ea-d720-4c98-a515-e28490dd2af4\") " pod="openstack/barbican-api-69f567c75d-8tg9t" Sep 30 10:07:43 crc kubenswrapper[4730]: I0930 10:07:43.605020 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-69f567c75d-8tg9t" Sep 30 10:07:45 crc kubenswrapper[4730]: I0930 10:07:45.525472 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-54cc89d54-v6hb5"] Sep 30 10:07:45 crc kubenswrapper[4730]: I0930 10:07:45.527249 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-54cc89d54-v6hb5" Sep 30 10:07:45 crc kubenswrapper[4730]: I0930 10:07:45.534060 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-internal-svc" Sep 30 10:07:45 crc kubenswrapper[4730]: I0930 10:07:45.534261 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-public-svc" Sep 30 10:07:45 crc kubenswrapper[4730]: I0930 10:07:45.539596 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-54cc89d54-v6hb5"] Sep 30 10:07:45 crc kubenswrapper[4730]: I0930 10:07:45.726374 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/857c5dfa-3085-497b-8466-96eefd60c85d-logs\") pod \"barbican-api-54cc89d54-v6hb5\" (UID: \"857c5dfa-3085-497b-8466-96eefd60c85d\") " pod="openstack/barbican-api-54cc89d54-v6hb5" Sep 30 10:07:45 crc kubenswrapper[4730]: I0930 10:07:45.726448 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nbjmn\" (UniqueName: \"kubernetes.io/projected/857c5dfa-3085-497b-8466-96eefd60c85d-kube-api-access-nbjmn\") pod \"barbican-api-54cc89d54-v6hb5\" (UID: \"857c5dfa-3085-497b-8466-96eefd60c85d\") " pod="openstack/barbican-api-54cc89d54-v6hb5" Sep 30 10:07:45 crc kubenswrapper[4730]: I0930 10:07:45.726497 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/857c5dfa-3085-497b-8466-96eefd60c85d-config-data\") pod \"barbican-api-54cc89d54-v6hb5\" (UID: \"857c5dfa-3085-497b-8466-96eefd60c85d\") " pod="openstack/barbican-api-54cc89d54-v6hb5" Sep 30 10:07:45 crc kubenswrapper[4730]: I0930 10:07:45.726529 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/857c5dfa-3085-497b-8466-96eefd60c85d-internal-tls-certs\") pod \"barbican-api-54cc89d54-v6hb5\" (UID: \"857c5dfa-3085-497b-8466-96eefd60c85d\") " pod="openstack/barbican-api-54cc89d54-v6hb5" Sep 30 10:07:45 crc kubenswrapper[4730]: I0930 10:07:45.726553 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/857c5dfa-3085-497b-8466-96eefd60c85d-combined-ca-bundle\") pod \"barbican-api-54cc89d54-v6hb5\" (UID: \"857c5dfa-3085-497b-8466-96eefd60c85d\") " pod="openstack/barbican-api-54cc89d54-v6hb5" Sep 30 10:07:45 crc kubenswrapper[4730]: I0930 10:07:45.726630 
4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/857c5dfa-3085-497b-8466-96eefd60c85d-config-data-custom\") pod \"barbican-api-54cc89d54-v6hb5\" (UID: \"857c5dfa-3085-497b-8466-96eefd60c85d\") " pod="openstack/barbican-api-54cc89d54-v6hb5" Sep 30 10:07:45 crc kubenswrapper[4730]: I0930 10:07:45.726912 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/857c5dfa-3085-497b-8466-96eefd60c85d-public-tls-certs\") pod \"barbican-api-54cc89d54-v6hb5\" (UID: \"857c5dfa-3085-497b-8466-96eefd60c85d\") " pod="openstack/barbican-api-54cc89d54-v6hb5" Sep 30 10:07:45 crc kubenswrapper[4730]: I0930 10:07:45.829102 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/857c5dfa-3085-497b-8466-96eefd60c85d-logs\") pod \"barbican-api-54cc89d54-v6hb5\" (UID: \"857c5dfa-3085-497b-8466-96eefd60c85d\") " pod="openstack/barbican-api-54cc89d54-v6hb5" Sep 30 10:07:45 crc kubenswrapper[4730]: I0930 10:07:45.829441 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nbjmn\" (UniqueName: \"kubernetes.io/projected/857c5dfa-3085-497b-8466-96eefd60c85d-kube-api-access-nbjmn\") pod \"barbican-api-54cc89d54-v6hb5\" (UID: \"857c5dfa-3085-497b-8466-96eefd60c85d\") " pod="openstack/barbican-api-54cc89d54-v6hb5" Sep 30 10:07:45 crc kubenswrapper[4730]: I0930 10:07:45.829472 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/857c5dfa-3085-497b-8466-96eefd60c85d-config-data\") pod \"barbican-api-54cc89d54-v6hb5\" (UID: \"857c5dfa-3085-497b-8466-96eefd60c85d\") " pod="openstack/barbican-api-54cc89d54-v6hb5" Sep 30 10:07:45 crc kubenswrapper[4730]: I0930 10:07:45.829494 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/857c5dfa-3085-497b-8466-96eefd60c85d-internal-tls-certs\") pod \"barbican-api-54cc89d54-v6hb5\" (UID: \"857c5dfa-3085-497b-8466-96eefd60c85d\") " pod="openstack/barbican-api-54cc89d54-v6hb5" Sep 30 10:07:45 crc kubenswrapper[4730]: I0930 10:07:45.829514 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/857c5dfa-3085-497b-8466-96eefd60c85d-combined-ca-bundle\") pod \"barbican-api-54cc89d54-v6hb5\" (UID: \"857c5dfa-3085-497b-8466-96eefd60c85d\") " pod="openstack/barbican-api-54cc89d54-v6hb5" Sep 30 10:07:45 crc kubenswrapper[4730]: I0930 10:07:45.829533 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/857c5dfa-3085-497b-8466-96eefd60c85d-config-data-custom\") pod \"barbican-api-54cc89d54-v6hb5\" (UID: \"857c5dfa-3085-497b-8466-96eefd60c85d\") " pod="openstack/barbican-api-54cc89d54-v6hb5" Sep 30 10:07:45 crc kubenswrapper[4730]: I0930 10:07:45.829590 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/857c5dfa-3085-497b-8466-96eefd60c85d-public-tls-certs\") pod \"barbican-api-54cc89d54-v6hb5\" (UID: \"857c5dfa-3085-497b-8466-96eefd60c85d\") " pod="openstack/barbican-api-54cc89d54-v6hb5" Sep 30 10:07:45 crc kubenswrapper[4730]: I0930 10:07:45.832088 4730 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/857c5dfa-3085-497b-8466-96eefd60c85d-logs\") pod \"barbican-api-54cc89d54-v6hb5\" (UID: \"857c5dfa-3085-497b-8466-96eefd60c85d\") " pod="openstack/barbican-api-54cc89d54-v6hb5" Sep 30 10:07:45 crc kubenswrapper[4730]: I0930 10:07:45.836695 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/857c5dfa-3085-497b-8466-96eefd60c85d-config-data-custom\") pod \"barbican-api-54cc89d54-v6hb5\" (UID: \"857c5dfa-3085-497b-8466-96eefd60c85d\") " pod="openstack/barbican-api-54cc89d54-v6hb5" Sep 30 10:07:45 crc kubenswrapper[4730]: I0930 10:07:45.836896 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/857c5dfa-3085-497b-8466-96eefd60c85d-public-tls-certs\") pod \"barbican-api-54cc89d54-v6hb5\" (UID: \"857c5dfa-3085-497b-8466-96eefd60c85d\") " pod="openstack/barbican-api-54cc89d54-v6hb5" Sep 30 10:07:45 crc kubenswrapper[4730]: I0930 10:07:45.837503 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/857c5dfa-3085-497b-8466-96eefd60c85d-combined-ca-bundle\") pod \"barbican-api-54cc89d54-v6hb5\" (UID: \"857c5dfa-3085-497b-8466-96eefd60c85d\") " pod="openstack/barbican-api-54cc89d54-v6hb5" Sep 30 10:07:45 crc kubenswrapper[4730]: I0930 10:07:45.838744 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/857c5dfa-3085-497b-8466-96eefd60c85d-internal-tls-certs\") pod \"barbican-api-54cc89d54-v6hb5\" (UID: \"857c5dfa-3085-497b-8466-96eefd60c85d\") " pod="openstack/barbican-api-54cc89d54-v6hb5" Sep 30 10:07:45 crc kubenswrapper[4730]: I0930 10:07:45.848135 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nbjmn\" (UniqueName: \"kubernetes.io/projected/857c5dfa-3085-497b-8466-96eefd60c85d-kube-api-access-nbjmn\") pod \"barbican-api-54cc89d54-v6hb5\" (UID: \"857c5dfa-3085-497b-8466-96eefd60c85d\") " pod="openstack/barbican-api-54cc89d54-v6hb5" Sep 30 10:07:45 crc kubenswrapper[4730]: I0930 10:07:45.853457 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/857c5dfa-3085-497b-8466-96eefd60c85d-config-data\") pod \"barbican-api-54cc89d54-v6hb5\" (UID: \"857c5dfa-3085-497b-8466-96eefd60c85d\") " pod="openstack/barbican-api-54cc89d54-v6hb5" Sep 30 10:07:45 crc kubenswrapper[4730]: I0930 10:07:45.863267 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-54cc89d54-v6hb5" Sep 30 10:07:51 crc kubenswrapper[4730]: I0930 10:07:51.871341 4730 scope.go:117] "RemoveContainer" containerID="e09c064a4008a0cf1eb83b0ca3fc5ba062c21a679935086f11174c671e85cbe9" Sep 30 10:07:51 crc kubenswrapper[4730]: E0930 10:07:51.913992 4730 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.176:5001/podified-master-centos10/openstack-cinder-api:watcher_latest" Sep 30 10:07:51 crc kubenswrapper[4730]: E0930 10:07:51.914045 4730 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.176:5001/podified-master-centos10/openstack-cinder-api:watcher_latest" Sep 30 10:07:51 crc kubenswrapper[4730]: E0930 10:07:51.914209 4730 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:cinder-db-sync,Image:38.102.83.176:5001/podified-master-centos10/openstack-cinder-api:watcher_latest,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:etc-machine-id,ReadOnly:true,MountPath:/etc/machine-id,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/config-data/merged,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/cinder/cinder.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-w9jsl,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:nil,Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-db-sync-wrvww_openstack(488816e9-d4e6-4956-9671-c9de4118821c): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 30 10:07:51 crc kubenswrapper[4730]: E0930 
10:07:51.915406 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/cinder-db-sync-wrvww" podUID="488816e9-d4e6-4956-9671-c9de4118821c" Sep 30 10:07:52 crc kubenswrapper[4730]: I0930 10:07:52.534149 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-54cc89d54-v6hb5"] Sep 30 10:07:52 crc kubenswrapper[4730]: W0930 10:07:52.849839 4730 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podec73b184_a60f_4477_8988_b9f6d937316c.slice/crio-c7668521126261df6e12638f93e18d3a2e3839fbfe4c23b5dafe0ddd85824960 WatchSource:0}: Error finding container c7668521126261df6e12638f93e18d3a2e3839fbfe4c23b5dafe0ddd85824960: Status 404 returned error can't find the container with id c7668521126261df6e12638f93e18d3a2e3839fbfe4c23b5dafe0ddd85824960 Sep 30 10:07:52 crc kubenswrapper[4730]: I0930 10:07:52.856403 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-69f567c75d-8tg9t"] Sep 30 10:07:52 crc kubenswrapper[4730]: I0930 10:07:52.871490 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/watcher-api-0"] Sep 30 10:07:52 crc kubenswrapper[4730]: W0930 10:07:52.875218 4730 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod87696e9f_ed08_459f_80b7_c4c5499e4157.slice/crio-d1cc41f83f24a276d46bb5cf7b619aafa63bc1fcb7f8264a6944369720cade90 WatchSource:0}: Error finding container d1cc41f83f24a276d46bb5cf7b619aafa63bc1fcb7f8264a6944369720cade90: Status 404 returned error can't find the container with id d1cc41f83f24a276d46bb5cf7b619aafa63bc1fcb7f8264a6944369720cade90 Sep 30 10:07:52 crc kubenswrapper[4730]: I0930 10:07:52.882631 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8469d44f79-gv4w7"] Sep 30 10:07:52 crc kubenswrapper[4730]: W0930 10:07:52.883391 4730 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1018e1cd_c432_45b3_8267_0f37607cff2f.slice/crio-00a0dd601f4f7b5a64986f01f2225e16b94de5283da7b153c617c87807ccdaf9 WatchSource:0}: Error finding container 00a0dd601f4f7b5a64986f01f2225e16b94de5283da7b153c617c87807ccdaf9: Status 404 returned error can't find the container with id 00a0dd601f4f7b5a64986f01f2225e16b94de5283da7b153c617c87807ccdaf9 Sep 30 10:07:52 crc kubenswrapper[4730]: I0930 10:07:52.893744 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-665f7f89df-b6qmw"] Sep 30 10:07:52 crc kubenswrapper[4730]: I0930 10:07:52.902767 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-6cdddb844-vsgsp"] Sep 30 10:07:52 crc kubenswrapper[4730]: I0930 10:07:52.907474 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8469d44f79-gv4w7" event={"ID":"ec73b184-a60f-4477-8988-b9f6d937316c","Type":"ContainerStarted","Data":"c7668521126261df6e12638f93e18d3a2e3839fbfe4c23b5dafe0ddd85824960"} Sep 30 10:07:52 crc kubenswrapper[4730]: I0930 10:07:52.909257 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-6cdddb844-vsgsp" 
event={"ID":"1018e1cd-c432-45b3-8267-0f37607cff2f","Type":"ContainerStarted","Data":"00a0dd601f4f7b5a64986f01f2225e16b94de5283da7b153c617c87807ccdaf9"} Sep 30 10:07:52 crc kubenswrapper[4730]: I0930 10:07:52.911477 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-54cc89d54-v6hb5" event={"ID":"857c5dfa-3085-497b-8466-96eefd60c85d","Type":"ContainerStarted","Data":"e6ec819d18d9de46347cb9a8087e117ebfa6335b0daf2609ec8336f38da8837e"} Sep 30 10:07:52 crc kubenswrapper[4730]: I0930 10:07:52.911528 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-54cc89d54-v6hb5" event={"ID":"857c5dfa-3085-497b-8466-96eefd60c85d","Type":"ContainerStarted","Data":"8a6ebf86a4b96567014888c3dca668a558e2c92d90714dd8cad58cd74a0a40fd"} Sep 30 10:07:52 crc kubenswrapper[4730]: I0930 10:07:52.914444 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-69f567c75d-8tg9t" event={"ID":"a5cb84ea-d720-4c98-a515-e28490dd2af4","Type":"ContainerStarted","Data":"5a8fae8938d0bdab036da21a87787c0b16a7a8718aeb249531bf43500945dac6"} Sep 30 10:07:52 crc kubenswrapper[4730]: I0930 10:07:52.920198 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-decision-engine-0" event={"ID":"a4f9bd21-5f86-4443-87be-eadb5d1c77f9","Type":"ContainerStarted","Data":"e752f863911725e4007eead2b8230aaef061b3efb1cf650c3a8365b075e404e5"} Sep 30 10:07:52 crc kubenswrapper[4730]: I0930 10:07:52.923220 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-665f7f89df-b6qmw" event={"ID":"87696e9f-ed08-459f-80b7-c4c5499e4157","Type":"ContainerStarted","Data":"d1cc41f83f24a276d46bb5cf7b619aafa63bc1fcb7f8264a6944369720cade90"} Sep 30 10:07:52 crc kubenswrapper[4730]: I0930 10:07:52.926681 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-api-0" event={"ID":"4caba7a4-0751-4410-88f1-084d5289d1c6","Type":"ContainerStarted","Data":"1ebbcfb013567f818e338245881e4dbb12148fbf1494ad9e5ab3074c501bc61a"} Sep 30 10:07:52 crc kubenswrapper[4730]: I0930 10:07:52.940648 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="1acf6558-7e16-4d00-b19f-c757db81dc58" containerName="ceilometer-central-agent" containerID="cri-o://dd1ce5807810f86dce08a7d2b15eca3cbad8bbaffbafef2cf992195da82d0d24" gracePeriod=30 Sep 30 10:07:52 crc kubenswrapper[4730]: I0930 10:07:52.940896 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1acf6558-7e16-4d00-b19f-c757db81dc58","Type":"ContainerStarted","Data":"79f29aeeff204b49d08af2ecf53bef3de5b7df8bf2821c68577269521cadca57"} Sep 30 10:07:52 crc kubenswrapper[4730]: I0930 10:07:52.941222 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="1acf6558-7e16-4d00-b19f-c757db81dc58" containerName="proxy-httpd" containerID="cri-o://79f29aeeff204b49d08af2ecf53bef3de5b7df8bf2821c68577269521cadca57" gracePeriod=30 Sep 30 10:07:52 crc kubenswrapper[4730]: I0930 10:07:52.941354 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="1acf6558-7e16-4d00-b19f-c757db81dc58" containerName="sg-core" containerID="cri-o://35170d1b138d98c0ee51c133179a1e6f61e35a90307e81f46017a14a894c7e5d" gracePeriod=30 Sep 30 10:07:52 crc kubenswrapper[4730]: I0930 10:07:52.941393 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" 
podUID="1acf6558-7e16-4d00-b19f-c757db81dc58" containerName="ceilometer-notification-agent" containerID="cri-o://04331fc8abaa27fb293dc73e41c6257c32f9236443a99c36e822aa0f750cbd51" gracePeriod=30 Sep 30 10:07:52 crc kubenswrapper[4730]: I0930 10:07:52.941468 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Sep 30 10:07:52 crc kubenswrapper[4730]: E0930 10:07:52.941672 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.176:5001/podified-master-centos10/openstack-cinder-api:watcher_latest\\\"\"" pod="openstack/cinder-db-sync-wrvww" podUID="488816e9-d4e6-4956-9671-c9de4118821c" Sep 30 10:07:52 crc kubenswrapper[4730]: I0930 10:07:52.997455 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/watcher-decision-engine-0" Sep 30 10:07:52 crc kubenswrapper[4730]: I0930 10:07:52.997521 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/watcher-decision-engine-0" Sep 30 10:07:53 crc kubenswrapper[4730]: I0930 10:07:53.035968 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/watcher-decision-engine-0" Sep 30 10:07:53 crc kubenswrapper[4730]: I0930 10:07:53.056282 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=3.221960271 podStartE2EDuration="40.056266054s" podCreationTimestamp="2025-09-30 10:07:13 +0000 UTC" firstStartedPulling="2025-09-30 10:07:15.08414094 +0000 UTC m=+1079.417400933" lastFinishedPulling="2025-09-30 10:07:51.918446723 +0000 UTC m=+1116.251706716" observedRunningTime="2025-09-30 10:07:52.998374782 +0000 UTC m=+1117.331634835" watchObservedRunningTime="2025-09-30 10:07:53.056266054 +0000 UTC m=+1117.389526047" Sep 30 10:07:53 crc kubenswrapper[4730]: I0930 10:07:53.949354 4730 generic.go:334] "Generic (PLEG): container finished" podID="1acf6558-7e16-4d00-b19f-c757db81dc58" containerID="79f29aeeff204b49d08af2ecf53bef3de5b7df8bf2821c68577269521cadca57" exitCode=0 Sep 30 10:07:53 crc kubenswrapper[4730]: I0930 10:07:53.949698 4730 generic.go:334] "Generic (PLEG): container finished" podID="1acf6558-7e16-4d00-b19f-c757db81dc58" containerID="35170d1b138d98c0ee51c133179a1e6f61e35a90307e81f46017a14a894c7e5d" exitCode=2 Sep 30 10:07:53 crc kubenswrapper[4730]: I0930 10:07:53.949709 4730 generic.go:334] "Generic (PLEG): container finished" podID="1acf6558-7e16-4d00-b19f-c757db81dc58" containerID="dd1ce5807810f86dce08a7d2b15eca3cbad8bbaffbafef2cf992195da82d0d24" exitCode=0 Sep 30 10:07:53 crc kubenswrapper[4730]: I0930 10:07:53.949536 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1acf6558-7e16-4d00-b19f-c757db81dc58","Type":"ContainerDied","Data":"79f29aeeff204b49d08af2ecf53bef3de5b7df8bf2821c68577269521cadca57"} Sep 30 10:07:53 crc kubenswrapper[4730]: I0930 10:07:53.949775 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1acf6558-7e16-4d00-b19f-c757db81dc58","Type":"ContainerDied","Data":"35170d1b138d98c0ee51c133179a1e6f61e35a90307e81f46017a14a894c7e5d"} Sep 30 10:07:53 crc kubenswrapper[4730]: I0930 10:07:53.949791 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1acf6558-7e16-4d00-b19f-c757db81dc58","Type":"ContainerDied","Data":"dd1ce5807810f86dce08a7d2b15eca3cbad8bbaffbafef2cf992195da82d0d24"} 
Sep 30 10:07:53 crc kubenswrapper[4730]: I0930 10:07:53.951695 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-69f567c75d-8tg9t" event={"ID":"a5cb84ea-d720-4c98-a515-e28490dd2af4","Type":"ContainerStarted","Data":"cc77bf0faf285f46d37cda0de4d2ec462c83281672fcb1dd85cd84b88861edd3"} Sep 30 10:07:53 crc kubenswrapper[4730]: I0930 10:07:53.951725 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-69f567c75d-8tg9t" event={"ID":"a5cb84ea-d720-4c98-a515-e28490dd2af4","Type":"ContainerStarted","Data":"176c613290be40bf9e30218ba39a996dbcdbd04898e894727efcccf97ebe7fc9"} Sep 30 10:07:53 crc kubenswrapper[4730]: I0930 10:07:53.951852 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-69f567c75d-8tg9t" Sep 30 10:07:53 crc kubenswrapper[4730]: I0930 10:07:53.951877 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-69f567c75d-8tg9t" Sep 30 10:07:53 crc kubenswrapper[4730]: I0930 10:07:53.955659 4730 generic.go:334] "Generic (PLEG): container finished" podID="ec73b184-a60f-4477-8988-b9f6d937316c" containerID="bf5c05e2975ac0e010a3af6d68dbbbdf5a572e6fe490cb40dd2f9f5dda9d17b9" exitCode=0 Sep 30 10:07:53 crc kubenswrapper[4730]: I0930 10:07:53.955720 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8469d44f79-gv4w7" event={"ID":"ec73b184-a60f-4477-8988-b9f6d937316c","Type":"ContainerDied","Data":"bf5c05e2975ac0e010a3af6d68dbbbdf5a572e6fe490cb40dd2f9f5dda9d17b9"} Sep 30 10:07:53 crc kubenswrapper[4730]: I0930 10:07:53.958211 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-54cc89d54-v6hb5" event={"ID":"857c5dfa-3085-497b-8466-96eefd60c85d","Type":"ContainerStarted","Data":"dd6ccb27788aa286d8fadb2e05e470b220ce4199fab1bc4af29af452187c41b8"} Sep 30 10:07:53 crc kubenswrapper[4730]: I0930 10:07:53.958900 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-54cc89d54-v6hb5" Sep 30 10:07:53 crc kubenswrapper[4730]: I0930 10:07:53.959031 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-54cc89d54-v6hb5" Sep 30 10:07:53 crc kubenswrapper[4730]: I0930 10:07:53.960575 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-api-0" event={"ID":"4caba7a4-0751-4410-88f1-084d5289d1c6","Type":"ContainerStarted","Data":"962de4318a2bbd1d33c719b0d25702a48fb274910abe5fcc2f65b5fb247d2590"} Sep 30 10:07:53 crc kubenswrapper[4730]: I0930 10:07:53.960628 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-api-0" event={"ID":"4caba7a4-0751-4410-88f1-084d5289d1c6","Type":"ContainerStarted","Data":"5c6999974ca650e20bc48001a9e9c4d1abd2656919849c3e674e538db7c62d2f"} Sep 30 10:07:53 crc kubenswrapper[4730]: I0930 10:07:53.984761 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-69f567c75d-8tg9t" podStartSLOduration=10.984737731 podStartE2EDuration="10.984737731s" podCreationTimestamp="2025-09-30 10:07:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 10:07:53.966386114 +0000 UTC m=+1118.299646107" watchObservedRunningTime="2025-09-30 10:07:53.984737731 +0000 UTC m=+1118.317997724" Sep 30 10:07:54 crc kubenswrapper[4730]: I0930 10:07:54.002703 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack/watcher-api-0" podStartSLOduration=14.002682027 podStartE2EDuration="14.002682027s" podCreationTimestamp="2025-09-30 10:07:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 10:07:53.98784506 +0000 UTC m=+1118.321105053" watchObservedRunningTime="2025-09-30 10:07:54.002682027 +0000 UTC m=+1118.335942030" Sep 30 10:07:54 crc kubenswrapper[4730]: I0930 10:07:54.030567 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-54cc89d54-v6hb5" podStartSLOduration=9.030546716 podStartE2EDuration="9.030546716s" podCreationTimestamp="2025-09-30 10:07:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 10:07:54.022404849 +0000 UTC m=+1118.355664862" watchObservedRunningTime="2025-09-30 10:07:54.030546716 +0000 UTC m=+1118.363806709" Sep 30 10:07:54 crc kubenswrapper[4730]: I0930 10:07:54.037443 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/watcher-decision-engine-0" Sep 30 10:07:54 crc kubenswrapper[4730]: I0930 10:07:54.971849 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8469d44f79-gv4w7" event={"ID":"ec73b184-a60f-4477-8988-b9f6d937316c","Type":"ContainerStarted","Data":"72eef8489ae81e8440d26031ce68fc853c2bc87d0380c965b9e4fce1b7f0d3dc"} Sep 30 10:07:54 crc kubenswrapper[4730]: I0930 10:07:54.971907 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-8469d44f79-gv4w7" Sep 30 10:07:54 crc kubenswrapper[4730]: I0930 10:07:54.976259 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-6cdddb844-vsgsp" event={"ID":"1018e1cd-c432-45b3-8267-0f37607cff2f","Type":"ContainerStarted","Data":"8173b41c8f9938466b105f7d6413e450a37d3f816577ba03de3eaf76b3608fd3"} Sep 30 10:07:54 crc kubenswrapper[4730]: I0930 10:07:54.976489 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-6cdddb844-vsgsp" event={"ID":"1018e1cd-c432-45b3-8267-0f37607cff2f","Type":"ContainerStarted","Data":"d2b86e5f0d332d29ba7a97e8ed26ab9114a86c64943320215e66da3583598cfd"} Sep 30 10:07:54 crc kubenswrapper[4730]: I0930 10:07:54.979082 4730 generic.go:334] "Generic (PLEG): container finished" podID="a4f9bd21-5f86-4443-87be-eadb5d1c77f9" containerID="e752f863911725e4007eead2b8230aaef061b3efb1cf650c3a8365b075e404e5" exitCode=1 Sep 30 10:07:54 crc kubenswrapper[4730]: I0930 10:07:54.979286 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-decision-engine-0" event={"ID":"a4f9bd21-5f86-4443-87be-eadb5d1c77f9","Type":"ContainerDied","Data":"e752f863911725e4007eead2b8230aaef061b3efb1cf650c3a8365b075e404e5"} Sep 30 10:07:54 crc kubenswrapper[4730]: I0930 10:07:54.979393 4730 scope.go:117] "RemoveContainer" containerID="f221a9aa65cec1a8d6cd07c7fd83d5f118dd091b859e3d97e66657018b1a7af5" Sep 30 10:07:54 crc kubenswrapper[4730]: I0930 10:07:54.980057 4730 scope.go:117] "RemoveContainer" containerID="e752f863911725e4007eead2b8230aaef061b3efb1cf650c3a8365b075e404e5" Sep 30 10:07:54 crc kubenswrapper[4730]: E0930 10:07:54.980512 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"watcher-decision-engine\" with CrashLoopBackOff: \"back-off 10s restarting failed container=watcher-decision-engine 
pod=watcher-decision-engine-0_openstack(a4f9bd21-5f86-4443-87be-eadb5d1c77f9)\"" pod="openstack/watcher-decision-engine-0" podUID="a4f9bd21-5f86-4443-87be-eadb5d1c77f9" Sep 30 10:07:54 crc kubenswrapper[4730]: I0930 10:07:54.985464 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-665f7f89df-b6qmw" event={"ID":"87696e9f-ed08-459f-80b7-c4c5499e4157","Type":"ContainerStarted","Data":"1361a1fece503b981b3c72de57123d0a223a5f5e909c00d3dd308ae300ff3bbc"} Sep 30 10:07:54 crc kubenswrapper[4730]: I0930 10:07:54.985738 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/watcher-api-0" Sep 30 10:07:54 crc kubenswrapper[4730]: I0930 10:07:54.985848 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-665f7f89df-b6qmw" event={"ID":"87696e9f-ed08-459f-80b7-c4c5499e4157","Type":"ContainerStarted","Data":"0cacb13a7f63fe96a3d2759b670e5b803588c406e942c21294a7152e64ea92e7"} Sep 30 10:07:55 crc kubenswrapper[4730]: I0930 10:07:55.008518 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-8469d44f79-gv4w7" podStartSLOduration=12.008490681 podStartE2EDuration="12.008490681s" podCreationTimestamp="2025-09-30 10:07:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 10:07:54.995385817 +0000 UTC m=+1119.328645850" watchObservedRunningTime="2025-09-30 10:07:55.008490681 +0000 UTC m=+1119.341750704" Sep 30 10:07:55 crc kubenswrapper[4730]: I0930 10:07:55.029694 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-665f7f89df-b6qmw" podStartSLOduration=11.808385935 podStartE2EDuration="13.02966832s" podCreationTimestamp="2025-09-30 10:07:42 +0000 UTC" firstStartedPulling="2025-09-30 10:07:52.878529653 +0000 UTC m=+1117.211789646" lastFinishedPulling="2025-09-30 10:07:54.099812038 +0000 UTC m=+1118.433072031" observedRunningTime="2025-09-30 10:07:55.012950344 +0000 UTC m=+1119.346210367" watchObservedRunningTime="2025-09-30 10:07:55.02966832 +0000 UTC m=+1119.362928343" Sep 30 10:07:55 crc kubenswrapper[4730]: I0930 10:07:55.043848 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-6cdddb844-vsgsp" podStartSLOduration=11.828366093 podStartE2EDuration="13.043810099s" podCreationTimestamp="2025-09-30 10:07:42 +0000 UTC" firstStartedPulling="2025-09-30 10:07:52.885924672 +0000 UTC m=+1117.219184665" lastFinishedPulling="2025-09-30 10:07:54.101368678 +0000 UTC m=+1118.434628671" observedRunningTime="2025-09-30 10:07:55.039725205 +0000 UTC m=+1119.372985208" watchObservedRunningTime="2025-09-30 10:07:55.043810099 +0000 UTC m=+1119.377070122" Sep 30 10:07:55 crc kubenswrapper[4730]: I0930 10:07:55.996013 4730 scope.go:117] "RemoveContainer" containerID="e752f863911725e4007eead2b8230aaef061b3efb1cf650c3a8365b075e404e5" Sep 30 10:07:55 crc kubenswrapper[4730]: E0930 10:07:55.996519 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"watcher-decision-engine\" with CrashLoopBackOff: \"back-off 10s restarting failed container=watcher-decision-engine pod=watcher-decision-engine-0_openstack(a4f9bd21-5f86-4443-87be-eadb5d1c77f9)\"" pod="openstack/watcher-decision-engine-0" podUID="a4f9bd21-5f86-4443-87be-eadb5d1c77f9" Sep 30 10:07:56 crc kubenswrapper[4730]: I0930 10:07:56.001510 4730 generic.go:334] "Generic (PLEG): container 
finished" podID="1acf6558-7e16-4d00-b19f-c757db81dc58" containerID="04331fc8abaa27fb293dc73e41c6257c32f9236443a99c36e822aa0f750cbd51" exitCode=0 Sep 30 10:07:56 crc kubenswrapper[4730]: I0930 10:07:56.002574 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1acf6558-7e16-4d00-b19f-c757db81dc58","Type":"ContainerDied","Data":"04331fc8abaa27fb293dc73e41c6257c32f9236443a99c36e822aa0f750cbd51"} Sep 30 10:07:56 crc kubenswrapper[4730]: I0930 10:07:56.104245 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 10:07:56 crc kubenswrapper[4730]: I0930 10:07:56.192824 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/watcher-api-0" Sep 30 10:07:56 crc kubenswrapper[4730]: I0930 10:07:56.226529 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1acf6558-7e16-4d00-b19f-c757db81dc58-combined-ca-bundle\") pod \"1acf6558-7e16-4d00-b19f-c757db81dc58\" (UID: \"1acf6558-7e16-4d00-b19f-c757db81dc58\") " Sep 30 10:07:56 crc kubenswrapper[4730]: I0930 10:07:56.226642 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kr2fv\" (UniqueName: \"kubernetes.io/projected/1acf6558-7e16-4d00-b19f-c757db81dc58-kube-api-access-kr2fv\") pod \"1acf6558-7e16-4d00-b19f-c757db81dc58\" (UID: \"1acf6558-7e16-4d00-b19f-c757db81dc58\") " Sep 30 10:07:56 crc kubenswrapper[4730]: I0930 10:07:56.226699 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/1acf6558-7e16-4d00-b19f-c757db81dc58-sg-core-conf-yaml\") pod \"1acf6558-7e16-4d00-b19f-c757db81dc58\" (UID: \"1acf6558-7e16-4d00-b19f-c757db81dc58\") " Sep 30 10:07:56 crc kubenswrapper[4730]: I0930 10:07:56.226730 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1acf6558-7e16-4d00-b19f-c757db81dc58-config-data\") pod \"1acf6558-7e16-4d00-b19f-c757db81dc58\" (UID: \"1acf6558-7e16-4d00-b19f-c757db81dc58\") " Sep 30 10:07:56 crc kubenswrapper[4730]: I0930 10:07:56.226850 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1acf6558-7e16-4d00-b19f-c757db81dc58-scripts\") pod \"1acf6558-7e16-4d00-b19f-c757db81dc58\" (UID: \"1acf6558-7e16-4d00-b19f-c757db81dc58\") " Sep 30 10:07:56 crc kubenswrapper[4730]: I0930 10:07:56.226943 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1acf6558-7e16-4d00-b19f-c757db81dc58-log-httpd\") pod \"1acf6558-7e16-4d00-b19f-c757db81dc58\" (UID: \"1acf6558-7e16-4d00-b19f-c757db81dc58\") " Sep 30 10:07:56 crc kubenswrapper[4730]: I0930 10:07:56.227402 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1acf6558-7e16-4d00-b19f-c757db81dc58-run-httpd\") pod \"1acf6558-7e16-4d00-b19f-c757db81dc58\" (UID: \"1acf6558-7e16-4d00-b19f-c757db81dc58\") " Sep 30 10:07:56 crc kubenswrapper[4730]: I0930 10:07:56.227327 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1acf6558-7e16-4d00-b19f-c757db81dc58-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "1acf6558-7e16-4d00-b19f-c757db81dc58" (UID: 
"1acf6558-7e16-4d00-b19f-c757db81dc58"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 10:07:56 crc kubenswrapper[4730]: I0930 10:07:56.227698 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1acf6558-7e16-4d00-b19f-c757db81dc58-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "1acf6558-7e16-4d00-b19f-c757db81dc58" (UID: "1acf6558-7e16-4d00-b19f-c757db81dc58"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 10:07:56 crc kubenswrapper[4730]: I0930 10:07:56.227993 4730 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1acf6558-7e16-4d00-b19f-c757db81dc58-log-httpd\") on node \"crc\" DevicePath \"\"" Sep 30 10:07:56 crc kubenswrapper[4730]: I0930 10:07:56.228123 4730 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1acf6558-7e16-4d00-b19f-c757db81dc58-run-httpd\") on node \"crc\" DevicePath \"\"" Sep 30 10:07:56 crc kubenswrapper[4730]: I0930 10:07:56.232258 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1acf6558-7e16-4d00-b19f-c757db81dc58-kube-api-access-kr2fv" (OuterVolumeSpecName: "kube-api-access-kr2fv") pod "1acf6558-7e16-4d00-b19f-c757db81dc58" (UID: "1acf6558-7e16-4d00-b19f-c757db81dc58"). InnerVolumeSpecName "kube-api-access-kr2fv". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:07:56 crc kubenswrapper[4730]: I0930 10:07:56.232766 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1acf6558-7e16-4d00-b19f-c757db81dc58-scripts" (OuterVolumeSpecName: "scripts") pod "1acf6558-7e16-4d00-b19f-c757db81dc58" (UID: "1acf6558-7e16-4d00-b19f-c757db81dc58"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:07:56 crc kubenswrapper[4730]: I0930 10:07:56.254516 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1acf6558-7e16-4d00-b19f-c757db81dc58-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "1acf6558-7e16-4d00-b19f-c757db81dc58" (UID: "1acf6558-7e16-4d00-b19f-c757db81dc58"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:07:56 crc kubenswrapper[4730]: I0930 10:07:56.305730 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1acf6558-7e16-4d00-b19f-c757db81dc58-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1acf6558-7e16-4d00-b19f-c757db81dc58" (UID: "1acf6558-7e16-4d00-b19f-c757db81dc58"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:07:56 crc kubenswrapper[4730]: I0930 10:07:56.330078 4730 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1acf6558-7e16-4d00-b19f-c757db81dc58-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 10:07:56 crc kubenswrapper[4730]: I0930 10:07:56.330110 4730 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1acf6558-7e16-4d00-b19f-c757db81dc58-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 10:07:56 crc kubenswrapper[4730]: I0930 10:07:56.330139 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kr2fv\" (UniqueName: \"kubernetes.io/projected/1acf6558-7e16-4d00-b19f-c757db81dc58-kube-api-access-kr2fv\") on node \"crc\" DevicePath \"\"" Sep 30 10:07:56 crc kubenswrapper[4730]: I0930 10:07:56.330153 4730 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/1acf6558-7e16-4d00-b19f-c757db81dc58-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Sep 30 10:07:56 crc kubenswrapper[4730]: I0930 10:07:56.352380 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1acf6558-7e16-4d00-b19f-c757db81dc58-config-data" (OuterVolumeSpecName: "config-data") pod "1acf6558-7e16-4d00-b19f-c757db81dc58" (UID: "1acf6558-7e16-4d00-b19f-c757db81dc58"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:07:56 crc kubenswrapper[4730]: I0930 10:07:56.431921 4730 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1acf6558-7e16-4d00-b19f-c757db81dc58-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 10:07:57 crc kubenswrapper[4730]: I0930 10:07:57.018289 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1acf6558-7e16-4d00-b19f-c757db81dc58","Type":"ContainerDied","Data":"733792964d1b647379ec2c992f12fc032da7cf1cc94dae88578bb2138b614226"} Sep 30 10:07:57 crc kubenswrapper[4730]: I0930 10:07:57.018497 4730 util.go:48] "No ready sandbox for pod can be found. 
Sep 30 10:07:57 crc kubenswrapper[4730]: I0930 10:07:57.019796 4730 scope.go:117] "RemoveContainer" containerID="79f29aeeff204b49d08af2ecf53bef3de5b7df8bf2821c68577269521cadca57"
Sep 30 10:07:57 crc kubenswrapper[4730]: I0930 10:07:57.023948 4730 generic.go:334] "Generic (PLEG): container finished" podID="577c636c-9e1c-4e65-b164-dcc8e200d7c1" containerID="db9ce9066063546a453449bd45807592f03643733df7fdeb5f1f204ff80fd0fc" exitCode=0
Sep 30 10:07:57 crc kubenswrapper[4730]: I0930 10:07:57.024255 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-cmvzl" event={"ID":"577c636c-9e1c-4e65-b164-dcc8e200d7c1","Type":"ContainerDied","Data":"db9ce9066063546a453449bd45807592f03643733df7fdeb5f1f204ff80fd0fc"}
Sep 30 10:07:57 crc kubenswrapper[4730]: I0930 10:07:57.025032 4730 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Sep 30 10:07:57 crc kubenswrapper[4730]: I0930 10:07:57.077868 4730 scope.go:117] "RemoveContainer" containerID="35170d1b138d98c0ee51c133179a1e6f61e35a90307e81f46017a14a894c7e5d"
Sep 30 10:07:57 crc kubenswrapper[4730]: I0930 10:07:57.083115 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Sep 30 10:07:57 crc kubenswrapper[4730]: I0930 10:07:57.095898 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"]
Sep 30 10:07:57 crc kubenswrapper[4730]: I0930 10:07:57.110329 4730 scope.go:117] "RemoveContainer" containerID="04331fc8abaa27fb293dc73e41c6257c32f9236443a99c36e822aa0f750cbd51"
Sep 30 10:07:57 crc kubenswrapper[4730]: I0930 10:07:57.120926 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"]
Sep 30 10:07:57 crc kubenswrapper[4730]: E0930 10:07:57.121502 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1acf6558-7e16-4d00-b19f-c757db81dc58" containerName="proxy-httpd"
Sep 30 10:07:57 crc kubenswrapper[4730]: I0930 10:07:57.121528 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="1acf6558-7e16-4d00-b19f-c757db81dc58" containerName="proxy-httpd"
Sep 30 10:07:57 crc kubenswrapper[4730]: E0930 10:07:57.121561 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1acf6558-7e16-4d00-b19f-c757db81dc58" containerName="ceilometer-central-agent"
Sep 30 10:07:57 crc kubenswrapper[4730]: I0930 10:07:57.121570 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="1acf6558-7e16-4d00-b19f-c757db81dc58" containerName="ceilometer-central-agent"
Sep 30 10:07:57 crc kubenswrapper[4730]: E0930 10:07:57.121596 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1acf6558-7e16-4d00-b19f-c757db81dc58" containerName="ceilometer-notification-agent"
Sep 30 10:07:57 crc kubenswrapper[4730]: I0930 10:07:57.121622 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="1acf6558-7e16-4d00-b19f-c757db81dc58" containerName="ceilometer-notification-agent"
Sep 30 10:07:57 crc kubenswrapper[4730]: E0930 10:07:57.121641 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1acf6558-7e16-4d00-b19f-c757db81dc58" containerName="sg-core"
Sep 30 10:07:57 crc kubenswrapper[4730]: I0930 10:07:57.121649 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="1acf6558-7e16-4d00-b19f-c757db81dc58" containerName="sg-core"
Sep 30 10:07:57 crc kubenswrapper[4730]: I0930 10:07:57.121864 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="1acf6558-7e16-4d00-b19f-c757db81dc58" containerName="sg-core"
Sep 30 10:07:57 crc kubenswrapper[4730]: I0930 10:07:57.121887 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="1acf6558-7e16-4d00-b19f-c757db81dc58" containerName="ceilometer-notification-agent"
Sep 30 10:07:57 crc kubenswrapper[4730]: I0930 10:07:57.121902 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="1acf6558-7e16-4d00-b19f-c757db81dc58" containerName="proxy-httpd"
Sep 30 10:07:57 crc kubenswrapper[4730]: I0930 10:07:57.121920 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="1acf6558-7e16-4d00-b19f-c757db81dc58" containerName="ceilometer-central-agent"
Sep 30 10:07:57 crc kubenswrapper[4730]: I0930 10:07:57.123780 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Sep 30 10:07:57 crc kubenswrapper[4730]: I0930 10:07:57.131222 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts"
Sep 30 10:07:57 crc kubenswrapper[4730]: I0930 10:07:57.131396 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data"
Sep 30 10:07:57 crc kubenswrapper[4730]: I0930 10:07:57.132330 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Sep 30 10:07:57 crc kubenswrapper[4730]: I0930 10:07:57.145281 4730 scope.go:117] "RemoveContainer" containerID="dd1ce5807810f86dce08a7d2b15eca3cbad8bbaffbafef2cf992195da82d0d24"
Sep 30 10:07:57 crc kubenswrapper[4730]: I0930 10:07:57.247450 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/522b231d-b240-485c-b52e-fbf2a2fe44bf-run-httpd\") pod \"ceilometer-0\" (UID: \"522b231d-b240-485c-b52e-fbf2a2fe44bf\") " pod="openstack/ceilometer-0"
Sep 30 10:07:57 crc kubenswrapper[4730]: I0930 10:07:57.247867 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/522b231d-b240-485c-b52e-fbf2a2fe44bf-config-data\") pod \"ceilometer-0\" (UID: \"522b231d-b240-485c-b52e-fbf2a2fe44bf\") " pod="openstack/ceilometer-0"
Sep 30 10:07:57 crc kubenswrapper[4730]: I0930 10:07:57.248016 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/522b231d-b240-485c-b52e-fbf2a2fe44bf-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"522b231d-b240-485c-b52e-fbf2a2fe44bf\") " pod="openstack/ceilometer-0"
Sep 30 10:07:57 crc kubenswrapper[4730]: I0930 10:07:57.248164 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sjx9d\" (UniqueName: \"kubernetes.io/projected/522b231d-b240-485c-b52e-fbf2a2fe44bf-kube-api-access-sjx9d\") pod \"ceilometer-0\" (UID: \"522b231d-b240-485c-b52e-fbf2a2fe44bf\") " pod="openstack/ceilometer-0"
Sep 30 10:07:57 crc kubenswrapper[4730]: I0930 10:07:57.248306 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/522b231d-b240-485c-b52e-fbf2a2fe44bf-scripts\") pod \"ceilometer-0\" (UID: \"522b231d-b240-485c-b52e-fbf2a2fe44bf\") " pod="openstack/ceilometer-0"
Sep 30 10:07:57 crc kubenswrapper[4730]: I0930 10:07:57.248435 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/522b231d-b240-485c-b52e-fbf2a2fe44bf-log-httpd\") pod \"ceilometer-0\" (UID: \"522b231d-b240-485c-b52e-fbf2a2fe44bf\") " pod="openstack/ceilometer-0"
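Note that the replacement ceilometer-0 keeps its pod name but gets a fresh UID (522b231d-... above, versus 1acf6558-... whose CPU and memory manager state was just purged by RemoveStaleState). A sketch, under the same this-log-only formatting assumptions as before, that surfaces such recreations by tracking UIDs per pod name:

// uidtrack.go — a sketch (editor's illustration) mapping pod names to UIDs.
package main

import (
	"bufio"
	"fmt"
	"os"
	"regexp"
)

var (
	podRe = regexp.MustCompile(`pod="([^"]+)"`)
	uidRe = regexp.MustCompile(`\(UID: \\"([0-9a-f-]+)\\"\)`)
)

func main() {
	uids := map[string]map[string]bool{} // pod name -> set of UIDs seen
	sc := bufio.NewScanner(os.Stdin)
	sc.Buffer(make([]byte, 0, 1<<20), 1<<20)
	for sc.Scan() {
		line := sc.Text()
		p := podRe.FindStringSubmatch(line)
		u := uidRe.FindStringSubmatch(line)
		if p == nil || u == nil {
			continue // need both fields on one reconciler line
		}
		if uids[p[1]] == nil {
			uids[p[1]] = map[string]bool{}
		}
		uids[p[1]][u[1]] = true
	}
	for pod, set := range uids {
		if len(set) > 1 {
			fmt.Printf("%s was recreated: %d distinct UIDs\n", pod, len(set))
		}
	}
}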
\"kubernetes.io/empty-dir/522b231d-b240-485c-b52e-fbf2a2fe44bf-log-httpd\") pod \"ceilometer-0\" (UID: \"522b231d-b240-485c-b52e-fbf2a2fe44bf\") " pod="openstack/ceilometer-0" Sep 30 10:07:57 crc kubenswrapper[4730]: I0930 10:07:57.248632 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/522b231d-b240-485c-b52e-fbf2a2fe44bf-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"522b231d-b240-485c-b52e-fbf2a2fe44bf\") " pod="openstack/ceilometer-0" Sep 30 10:07:57 crc kubenswrapper[4730]: I0930 10:07:57.350089 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sjx9d\" (UniqueName: \"kubernetes.io/projected/522b231d-b240-485c-b52e-fbf2a2fe44bf-kube-api-access-sjx9d\") pod \"ceilometer-0\" (UID: \"522b231d-b240-485c-b52e-fbf2a2fe44bf\") " pod="openstack/ceilometer-0" Sep 30 10:07:57 crc kubenswrapper[4730]: I0930 10:07:57.350175 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/522b231d-b240-485c-b52e-fbf2a2fe44bf-scripts\") pod \"ceilometer-0\" (UID: \"522b231d-b240-485c-b52e-fbf2a2fe44bf\") " pod="openstack/ceilometer-0" Sep 30 10:07:57 crc kubenswrapper[4730]: I0930 10:07:57.350214 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/522b231d-b240-485c-b52e-fbf2a2fe44bf-log-httpd\") pod \"ceilometer-0\" (UID: \"522b231d-b240-485c-b52e-fbf2a2fe44bf\") " pod="openstack/ceilometer-0" Sep 30 10:07:57 crc kubenswrapper[4730]: I0930 10:07:57.350399 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/522b231d-b240-485c-b52e-fbf2a2fe44bf-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"522b231d-b240-485c-b52e-fbf2a2fe44bf\") " pod="openstack/ceilometer-0" Sep 30 10:07:57 crc kubenswrapper[4730]: I0930 10:07:57.350556 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/522b231d-b240-485c-b52e-fbf2a2fe44bf-run-httpd\") pod \"ceilometer-0\" (UID: \"522b231d-b240-485c-b52e-fbf2a2fe44bf\") " pod="openstack/ceilometer-0" Sep 30 10:07:57 crc kubenswrapper[4730]: I0930 10:07:57.350601 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/522b231d-b240-485c-b52e-fbf2a2fe44bf-config-data\") pod \"ceilometer-0\" (UID: \"522b231d-b240-485c-b52e-fbf2a2fe44bf\") " pod="openstack/ceilometer-0" Sep 30 10:07:57 crc kubenswrapper[4730]: I0930 10:07:57.350721 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/522b231d-b240-485c-b52e-fbf2a2fe44bf-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"522b231d-b240-485c-b52e-fbf2a2fe44bf\") " pod="openstack/ceilometer-0" Sep 30 10:07:57 crc kubenswrapper[4730]: I0930 10:07:57.350799 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/522b231d-b240-485c-b52e-fbf2a2fe44bf-log-httpd\") pod \"ceilometer-0\" (UID: \"522b231d-b240-485c-b52e-fbf2a2fe44bf\") " pod="openstack/ceilometer-0" Sep 30 10:07:57 crc kubenswrapper[4730]: I0930 10:07:57.351368 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/522b231d-b240-485c-b52e-fbf2a2fe44bf-run-httpd\") pod \"ceilometer-0\" (UID: \"522b231d-b240-485c-b52e-fbf2a2fe44bf\") " pod="openstack/ceilometer-0" Sep 30 10:07:57 crc kubenswrapper[4730]: I0930 10:07:57.356008 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/522b231d-b240-485c-b52e-fbf2a2fe44bf-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"522b231d-b240-485c-b52e-fbf2a2fe44bf\") " pod="openstack/ceilometer-0" Sep 30 10:07:57 crc kubenswrapper[4730]: I0930 10:07:57.358004 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/522b231d-b240-485c-b52e-fbf2a2fe44bf-scripts\") pod \"ceilometer-0\" (UID: \"522b231d-b240-485c-b52e-fbf2a2fe44bf\") " pod="openstack/ceilometer-0" Sep 30 10:07:57 crc kubenswrapper[4730]: I0930 10:07:57.371755 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/522b231d-b240-485c-b52e-fbf2a2fe44bf-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"522b231d-b240-485c-b52e-fbf2a2fe44bf\") " pod="openstack/ceilometer-0" Sep 30 10:07:57 crc kubenswrapper[4730]: I0930 10:07:57.375834 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sjx9d\" (UniqueName: \"kubernetes.io/projected/522b231d-b240-485c-b52e-fbf2a2fe44bf-kube-api-access-sjx9d\") pod \"ceilometer-0\" (UID: \"522b231d-b240-485c-b52e-fbf2a2fe44bf\") " pod="openstack/ceilometer-0" Sep 30 10:07:57 crc kubenswrapper[4730]: I0930 10:07:57.376258 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/522b231d-b240-485c-b52e-fbf2a2fe44bf-config-data\") pod \"ceilometer-0\" (UID: \"522b231d-b240-485c-b52e-fbf2a2fe44bf\") " pod="openstack/ceilometer-0" Sep 30 10:07:57 crc kubenswrapper[4730]: I0930 10:07:57.451434 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/watcher-api-0" Sep 30 10:07:57 crc kubenswrapper[4730]: I0930 10:07:57.456136 4730 util.go:30] "No sandbox for pod can be found. 
Sep 30 10:07:57 crc kubenswrapper[4730]: I0930 10:07:57.920270 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Sep 30 10:07:57 crc kubenswrapper[4730]: W0930 10:07:57.925583 4730 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod522b231d_b240_485c_b52e_fbf2a2fe44bf.slice/crio-2060a76e00e7a1cbeb64c65b1f04e7b01d5e6b6f3f6afbf44e712853f14df38b WatchSource:0}: Error finding container 2060a76e00e7a1cbeb64c65b1f04e7b01d5e6b6f3f6afbf44e712853f14df38b: Status 404 returned error can't find the container with id 2060a76e00e7a1cbeb64c65b1f04e7b01d5e6b6f3f6afbf44e712853f14df38b
Sep 30 10:07:58 crc kubenswrapper[4730]: I0930 10:07:58.033881 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"522b231d-b240-485c-b52e-fbf2a2fe44bf","Type":"ContainerStarted","Data":"2060a76e00e7a1cbeb64c65b1f04e7b01d5e6b6f3f6afbf44e712853f14df38b"}
Sep 30 10:07:58 crc kubenswrapper[4730]: I0930 10:07:58.390336 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1acf6558-7e16-4d00-b19f-c757db81dc58" path="/var/lib/kubelet/pods/1acf6558-7e16-4d00-b19f-c757db81dc58/volumes"
Sep 30 10:07:58 crc kubenswrapper[4730]: I0930 10:07:58.399835 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-cmvzl"
Sep 30 10:07:58 crc kubenswrapper[4730]: I0930 10:07:58.476537 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qfc74\" (UniqueName: \"kubernetes.io/projected/577c636c-9e1c-4e65-b164-dcc8e200d7c1-kube-api-access-qfc74\") pod \"577c636c-9e1c-4e65-b164-dcc8e200d7c1\" (UID: \"577c636c-9e1c-4e65-b164-dcc8e200d7c1\") "
Sep 30 10:07:58 crc kubenswrapper[4730]: I0930 10:07:58.476686 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/577c636c-9e1c-4e65-b164-dcc8e200d7c1-combined-ca-bundle\") pod \"577c636c-9e1c-4e65-b164-dcc8e200d7c1\" (UID: \"577c636c-9e1c-4e65-b164-dcc8e200d7c1\") "
Sep 30 10:07:58 crc kubenswrapper[4730]: I0930 10:07:58.476736 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/577c636c-9e1c-4e65-b164-dcc8e200d7c1-config\") pod \"577c636c-9e1c-4e65-b164-dcc8e200d7c1\" (UID: \"577c636c-9e1c-4e65-b164-dcc8e200d7c1\") "
Sep 30 10:07:58 crc kubenswrapper[4730]: I0930 10:07:58.482454 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/577c636c-9e1c-4e65-b164-dcc8e200d7c1-kube-api-access-qfc74" (OuterVolumeSpecName: "kube-api-access-qfc74") pod "577c636c-9e1c-4e65-b164-dcc8e200d7c1" (UID: "577c636c-9e1c-4e65-b164-dcc8e200d7c1"). InnerVolumeSpecName "kube-api-access-qfc74". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 10:07:58 crc kubenswrapper[4730]: I0930 10:07:58.502209 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/577c636c-9e1c-4e65-b164-dcc8e200d7c1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "577c636c-9e1c-4e65-b164-dcc8e200d7c1" (UID: "577c636c-9e1c-4e65-b164-dcc8e200d7c1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 10:07:58 crc kubenswrapper[4730]: I0930 10:07:58.505968 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/577c636c-9e1c-4e65-b164-dcc8e200d7c1-config" (OuterVolumeSpecName: "config") pod "577c636c-9e1c-4e65-b164-dcc8e200d7c1" (UID: "577c636c-9e1c-4e65-b164-dcc8e200d7c1"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 10:07:58 crc kubenswrapper[4730]: I0930 10:07:58.577929 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qfc74\" (UniqueName: \"kubernetes.io/projected/577c636c-9e1c-4e65-b164-dcc8e200d7c1-kube-api-access-qfc74\") on node \"crc\" DevicePath \"\""
Sep 30 10:07:58 crc kubenswrapper[4730]: I0930 10:07:58.577962 4730 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/577c636c-9e1c-4e65-b164-dcc8e200d7c1-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 30 10:07:58 crc kubenswrapper[4730]: I0930 10:07:58.577972 4730 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/577c636c-9e1c-4e65-b164-dcc8e200d7c1-config\") on node \"crc\" DevicePath \"\""
Sep 30 10:07:59 crc kubenswrapper[4730]: I0930 10:07:59.057966 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"522b231d-b240-485c-b52e-fbf2a2fe44bf","Type":"ContainerStarted","Data":"96d0060e426320737b668a20b50713e0063ccf78a300b5f83f97b8eacddaf3ee"}
Sep 30 10:07:59 crc kubenswrapper[4730]: I0930 10:07:59.058361 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"522b231d-b240-485c-b52e-fbf2a2fe44bf","Type":"ContainerStarted","Data":"00f330622caf2a6afaff47b297f4c11aa71f7e71c60fec3679b1fbf6df90c535"}
Sep 30 10:07:59 crc kubenswrapper[4730]: I0930 10:07:59.067536 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-cmvzl" event={"ID":"577c636c-9e1c-4e65-b164-dcc8e200d7c1","Type":"ContainerDied","Data":"382a5f83342243e3b596f071c1b2f8a9c5fa04f7b6f9520da54458fcd6ce5661"}
Sep 30 10:07:59 crc kubenswrapper[4730]: I0930 10:07:59.067588 4730 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="382a5f83342243e3b596f071c1b2f8a9c5fa04f7b6f9520da54458fcd6ce5661"
Sep 30 10:07:59 crc kubenswrapper[4730]: I0930 10:07:59.067623 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-cmvzl"
Sep 30 10:07:59 crc kubenswrapper[4730]: I0930 10:07:59.248089 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8469d44f79-gv4w7"]
Sep 30 10:07:59 crc kubenswrapper[4730]: I0930 10:07:59.248370 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-8469d44f79-gv4w7" podUID="ec73b184-a60f-4477-8988-b9f6d937316c" containerName="dnsmasq-dns" containerID="cri-o://72eef8489ae81e8440d26031ce68fc853c2bc87d0380c965b9e4fce1b7f0d3dc" gracePeriod=10
Sep 30 10:07:59 crc kubenswrapper[4730]: I0930 10:07:59.251511 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-8469d44f79-gv4w7"
Sep 30 10:07:59 crc kubenswrapper[4730]: I0930 10:07:59.276297 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-66f5888b75-zf578"]
Sep 30 10:07:59 crc kubenswrapper[4730]: E0930 10:07:59.276730 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="577c636c-9e1c-4e65-b164-dcc8e200d7c1" containerName="neutron-db-sync"
Sep 30 10:07:59 crc kubenswrapper[4730]: I0930 10:07:59.276744 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="577c636c-9e1c-4e65-b164-dcc8e200d7c1" containerName="neutron-db-sync"
Sep 30 10:07:59 crc kubenswrapper[4730]: I0930 10:07:59.276923 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="577c636c-9e1c-4e65-b164-dcc8e200d7c1" containerName="neutron-db-sync"
Sep 30 10:07:59 crc kubenswrapper[4730]: I0930 10:07:59.277926 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-66f5888b75-zf578"
Sep 30 10:07:59 crc kubenswrapper[4730]: I0930 10:07:59.305285 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-66f5888b75-zf578"]
Sep 30 10:07:59 crc kubenswrapper[4730]: I0930 10:07:59.379643 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-f956cddd4-sbtc4"]
Sep 30 10:07:59 crc kubenswrapper[4730]: I0930 10:07:59.381780 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-f956cddd4-sbtc4"
Sep 30 10:07:59 crc kubenswrapper[4730]: I0930 10:07:59.385160 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config"
Sep 30 10:07:59 crc kubenswrapper[4730]: I0930 10:07:59.385362 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-ovndbs"
Sep 30 10:07:59 crc kubenswrapper[4730]: I0930 10:07:59.385469 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-97m5r"
Sep 30 10:07:59 crc kubenswrapper[4730]: I0930 10:07:59.394051 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config"
Sep 30 10:07:59 crc kubenswrapper[4730]: I0930 10:07:59.401368 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1e41c10c-2d54-4df9-bda4-44ae2eba244d-dns-svc\") pod \"dnsmasq-dns-66f5888b75-zf578\" (UID: \"1e41c10c-2d54-4df9-bda4-44ae2eba244d\") " pod="openstack/dnsmasq-dns-66f5888b75-zf578"
Sep 30 10:07:59 crc kubenswrapper[4730]: I0930 10:07:59.401478 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bc4d7fc4-7053-4985-831a-093ad13a9f45-combined-ca-bundle\") pod \"neutron-f956cddd4-sbtc4\" (UID: \"bc4d7fc4-7053-4985-831a-093ad13a9f45\") " pod="openstack/neutron-f956cddd4-sbtc4"
Sep 30 10:07:59 crc kubenswrapper[4730]: I0930 10:07:59.401506 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1e41c10c-2d54-4df9-bda4-44ae2eba244d-ovsdbserver-nb\") pod \"dnsmasq-dns-66f5888b75-zf578\" (UID: \"1e41c10c-2d54-4df9-bda4-44ae2eba244d\") " pod="openstack/dnsmasq-dns-66f5888b75-zf578"
Sep 30 10:07:59 crc kubenswrapper[4730]: I0930 10:07:59.401538 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/bc4d7fc4-7053-4985-831a-093ad13a9f45-config\") pod \"neutron-f956cddd4-sbtc4\" (UID: \"bc4d7fc4-7053-4985-831a-093ad13a9f45\") " pod="openstack/neutron-f956cddd4-sbtc4"
Sep 30 10:07:59 crc kubenswrapper[4730]: I0930 10:07:59.401556 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/bc4d7fc4-7053-4985-831a-093ad13a9f45-httpd-config\") pod \"neutron-f956cddd4-sbtc4\" (UID: \"bc4d7fc4-7053-4985-831a-093ad13a9f45\") " pod="openstack/neutron-f956cddd4-sbtc4"
Sep 30 10:07:59 crc kubenswrapper[4730]: I0930 10:07:59.401604 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/bc4d7fc4-7053-4985-831a-093ad13a9f45-ovndb-tls-certs\") pod \"neutron-f956cddd4-sbtc4\" (UID: \"bc4d7fc4-7053-4985-831a-093ad13a9f45\") " pod="openstack/neutron-f956cddd4-sbtc4"
Sep 30 10:07:59 crc kubenswrapper[4730]: I0930 10:07:59.401638 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z9qx7\" (UniqueName: \"kubernetes.io/projected/bc4d7fc4-7053-4985-831a-093ad13a9f45-kube-api-access-z9qx7\") pod \"neutron-f956cddd4-sbtc4\" (UID: \"bc4d7fc4-7053-4985-831a-093ad13a9f45\") " pod="openstack/neutron-f956cddd4-sbtc4"
Sep 30 10:07:59 crc kubenswrapper[4730]: I0930 10:07:59.401661 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qdzvc\" (UniqueName: \"kubernetes.io/projected/1e41c10c-2d54-4df9-bda4-44ae2eba244d-kube-api-access-qdzvc\") pod \"dnsmasq-dns-66f5888b75-zf578\" (UID: \"1e41c10c-2d54-4df9-bda4-44ae2eba244d\") " pod="openstack/dnsmasq-dns-66f5888b75-zf578"
Sep 30 10:07:59 crc kubenswrapper[4730]: I0930 10:07:59.401682 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1e41c10c-2d54-4df9-bda4-44ae2eba244d-config\") pod \"dnsmasq-dns-66f5888b75-zf578\" (UID: \"1e41c10c-2d54-4df9-bda4-44ae2eba244d\") " pod="openstack/dnsmasq-dns-66f5888b75-zf578"
Sep 30 10:07:59 crc kubenswrapper[4730]: I0930 10:07:59.401771 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1e41c10c-2d54-4df9-bda4-44ae2eba244d-ovsdbserver-sb\") pod \"dnsmasq-dns-66f5888b75-zf578\" (UID: \"1e41c10c-2d54-4df9-bda4-44ae2eba244d\") " pod="openstack/dnsmasq-dns-66f5888b75-zf578"
Sep 30 10:07:59 crc kubenswrapper[4730]: I0930 10:07:59.402568 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-f956cddd4-sbtc4"]
Sep 30 10:07:59 crc kubenswrapper[4730]: I0930 10:07:59.503600 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1e41c10c-2d54-4df9-bda4-44ae2eba244d-ovsdbserver-nb\") pod \"dnsmasq-dns-66f5888b75-zf578\" (UID: \"1e41c10c-2d54-4df9-bda4-44ae2eba244d\") " pod="openstack/dnsmasq-dns-66f5888b75-zf578"
Sep 30 10:07:59 crc kubenswrapper[4730]: I0930 10:07:59.503891 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bc4d7fc4-7053-4985-831a-093ad13a9f45-combined-ca-bundle\") pod \"neutron-f956cddd4-sbtc4\" (UID: \"bc4d7fc4-7053-4985-831a-093ad13a9f45\") " pod="openstack/neutron-f956cddd4-sbtc4"
Sep 30 10:07:59 crc kubenswrapper[4730]: I0930 10:07:59.503925 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/bc4d7fc4-7053-4985-831a-093ad13a9f45-config\") pod \"neutron-f956cddd4-sbtc4\" (UID: \"bc4d7fc4-7053-4985-831a-093ad13a9f45\") " pod="openstack/neutron-f956cddd4-sbtc4"
Sep 30 10:07:59 crc kubenswrapper[4730]: I0930 10:07:59.503940 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/bc4d7fc4-7053-4985-831a-093ad13a9f45-httpd-config\") pod \"neutron-f956cddd4-sbtc4\" (UID: \"bc4d7fc4-7053-4985-831a-093ad13a9f45\") " pod="openstack/neutron-f956cddd4-sbtc4"
Sep 30 10:07:59 crc kubenswrapper[4730]: I0930 10:07:59.503987 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/bc4d7fc4-7053-4985-831a-093ad13a9f45-ovndb-tls-certs\") pod \"neutron-f956cddd4-sbtc4\" (UID: \"bc4d7fc4-7053-4985-831a-093ad13a9f45\") " pod="openstack/neutron-f956cddd4-sbtc4"
Sep 30 10:07:59 crc kubenswrapper[4730]: I0930 10:07:59.504007 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z9qx7\" (UniqueName: \"kubernetes.io/projected/bc4d7fc4-7053-4985-831a-093ad13a9f45-kube-api-access-z9qx7\") pod \"neutron-f956cddd4-sbtc4\" (UID: \"bc4d7fc4-7053-4985-831a-093ad13a9f45\") " pod="openstack/neutron-f956cddd4-sbtc4"
Sep 30 10:07:59 crc kubenswrapper[4730]: I0930 10:07:59.504027 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qdzvc\" (UniqueName: \"kubernetes.io/projected/1e41c10c-2d54-4df9-bda4-44ae2eba244d-kube-api-access-qdzvc\") pod \"dnsmasq-dns-66f5888b75-zf578\" (UID: \"1e41c10c-2d54-4df9-bda4-44ae2eba244d\") " pod="openstack/dnsmasq-dns-66f5888b75-zf578"
Sep 30 10:07:59 crc kubenswrapper[4730]: I0930 10:07:59.504044 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1e41c10c-2d54-4df9-bda4-44ae2eba244d-config\") pod \"dnsmasq-dns-66f5888b75-zf578\" (UID: \"1e41c10c-2d54-4df9-bda4-44ae2eba244d\") " pod="openstack/dnsmasq-dns-66f5888b75-zf578"
Sep 30 10:07:59 crc kubenswrapper[4730]: I0930 10:07:59.504071 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1e41c10c-2d54-4df9-bda4-44ae2eba244d-ovsdbserver-sb\") pod \"dnsmasq-dns-66f5888b75-zf578\" (UID: \"1e41c10c-2d54-4df9-bda4-44ae2eba244d\") " pod="openstack/dnsmasq-dns-66f5888b75-zf578"
Sep 30 10:07:59 crc kubenswrapper[4730]: I0930 10:07:59.504114 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1e41c10c-2d54-4df9-bda4-44ae2eba244d-dns-svc\") pod \"dnsmasq-dns-66f5888b75-zf578\" (UID: \"1e41c10c-2d54-4df9-bda4-44ae2eba244d\") " pod="openstack/dnsmasq-dns-66f5888b75-zf578"
Sep 30 10:07:59 crc kubenswrapper[4730]: I0930 10:07:59.504995 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1e41c10c-2d54-4df9-bda4-44ae2eba244d-dns-svc\") pod \"dnsmasq-dns-66f5888b75-zf578\" (UID: \"1e41c10c-2d54-4df9-bda4-44ae2eba244d\") " pod="openstack/dnsmasq-dns-66f5888b75-zf578"
Sep 30 10:07:59 crc kubenswrapper[4730]: I0930 10:07:59.506262 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1e41c10c-2d54-4df9-bda4-44ae2eba244d-ovsdbserver-nb\") pod \"dnsmasq-dns-66f5888b75-zf578\" (UID: \"1e41c10c-2d54-4df9-bda4-44ae2eba244d\") " pod="openstack/dnsmasq-dns-66f5888b75-zf578"
Sep 30 10:07:59 crc kubenswrapper[4730]: I0930 10:07:59.510502 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1e41c10c-2d54-4df9-bda4-44ae2eba244d-config\") pod \"dnsmasq-dns-66f5888b75-zf578\" (UID: \"1e41c10c-2d54-4df9-bda4-44ae2eba244d\") " pod="openstack/dnsmasq-dns-66f5888b75-zf578"
Sep 30 10:07:59 crc kubenswrapper[4730]: I0930 10:07:59.511973 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1e41c10c-2d54-4df9-bda4-44ae2eba244d-ovsdbserver-sb\") pod \"dnsmasq-dns-66f5888b75-zf578\" (UID: \"1e41c10c-2d54-4df9-bda4-44ae2eba244d\") " pod="openstack/dnsmasq-dns-66f5888b75-zf578"
Sep 30 10:07:59 crc kubenswrapper[4730]: I0930 10:07:59.518917 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/bc4d7fc4-7053-4985-831a-093ad13a9f45-httpd-config\") pod \"neutron-f956cddd4-sbtc4\" (UID: \"bc4d7fc4-7053-4985-831a-093ad13a9f45\") " pod="openstack/neutron-f956cddd4-sbtc4"
Sep 30 10:07:59 crc kubenswrapper[4730]: I0930 10:07:59.519573 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bc4d7fc4-7053-4985-831a-093ad13a9f45-combined-ca-bundle\") pod \"neutron-f956cddd4-sbtc4\" (UID: \"bc4d7fc4-7053-4985-831a-093ad13a9f45\") " pod="openstack/neutron-f956cddd4-sbtc4"
Sep 30 10:07:59 crc kubenswrapper[4730]: I0930 10:07:59.521325 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/bc4d7fc4-7053-4985-831a-093ad13a9f45-config\") pod \"neutron-f956cddd4-sbtc4\" (UID: \"bc4d7fc4-7053-4985-831a-093ad13a9f45\") " pod="openstack/neutron-f956cddd4-sbtc4"
Sep 30 10:07:59 crc kubenswrapper[4730]: I0930 10:07:59.522251 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/bc4d7fc4-7053-4985-831a-093ad13a9f45-ovndb-tls-certs\") pod \"neutron-f956cddd4-sbtc4\" (UID: \"bc4d7fc4-7053-4985-831a-093ad13a9f45\") " pod="openstack/neutron-f956cddd4-sbtc4"
Sep 30 10:07:59 crc kubenswrapper[4730]: I0930 10:07:59.533093 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qdzvc\" (UniqueName: \"kubernetes.io/projected/1e41c10c-2d54-4df9-bda4-44ae2eba244d-kube-api-access-qdzvc\") pod \"dnsmasq-dns-66f5888b75-zf578\" (UID: \"1e41c10c-2d54-4df9-bda4-44ae2eba244d\") " pod="openstack/dnsmasq-dns-66f5888b75-zf578"
Sep 30 10:07:59 crc kubenswrapper[4730]: I0930 10:07:59.533726 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z9qx7\" (UniqueName: \"kubernetes.io/projected/bc4d7fc4-7053-4985-831a-093ad13a9f45-kube-api-access-z9qx7\") pod \"neutron-f956cddd4-sbtc4\" (UID: \"bc4d7fc4-7053-4985-831a-093ad13a9f45\") " pod="openstack/neutron-f956cddd4-sbtc4"
Sep 30 10:07:59 crc kubenswrapper[4730]: I0930 10:07:59.761072 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-66f5888b75-zf578"
Sep 30 10:07:59 crc kubenswrapper[4730]: I0930 10:07:59.775063 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-f956cddd4-sbtc4"
Sep 30 10:07:59 crc kubenswrapper[4730]: I0930 10:07:59.844157 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8469d44f79-gv4w7"
Sep 30 10:08:00 crc kubenswrapper[4730]: I0930 10:08:00.030479 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ec73b184-a60f-4477-8988-b9f6d937316c-config\") pod \"ec73b184-a60f-4477-8988-b9f6d937316c\" (UID: \"ec73b184-a60f-4477-8988-b9f6d937316c\") "
Sep 30 10:08:00 crc kubenswrapper[4730]: I0930 10:08:00.030836 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ec73b184-a60f-4477-8988-b9f6d937316c-ovsdbserver-nb\") pod \"ec73b184-a60f-4477-8988-b9f6d937316c\" (UID: \"ec73b184-a60f-4477-8988-b9f6d937316c\") "
Sep 30 10:08:00 crc kubenswrapper[4730]: I0930 10:08:00.030900 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j8m87\" (UniqueName: \"kubernetes.io/projected/ec73b184-a60f-4477-8988-b9f6d937316c-kube-api-access-j8m87\") pod \"ec73b184-a60f-4477-8988-b9f6d937316c\" (UID: \"ec73b184-a60f-4477-8988-b9f6d937316c\") "
Sep 30 10:08:00 crc kubenswrapper[4730]: I0930 10:08:00.030932 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ec73b184-a60f-4477-8988-b9f6d937316c-dns-svc\") pod \"ec73b184-a60f-4477-8988-b9f6d937316c\" (UID: \"ec73b184-a60f-4477-8988-b9f6d937316c\") "
Sep 30 10:08:00 crc kubenswrapper[4730]: I0930 10:08:00.031090 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ec73b184-a60f-4477-8988-b9f6d937316c-ovsdbserver-sb\") pod \"ec73b184-a60f-4477-8988-b9f6d937316c\" (UID: \"ec73b184-a60f-4477-8988-b9f6d937316c\") "
Sep 30 10:08:00 crc kubenswrapper[4730]: I0930 10:08:00.073065 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ec73b184-a60f-4477-8988-b9f6d937316c-kube-api-access-j8m87" (OuterVolumeSpecName: "kube-api-access-j8m87") pod "ec73b184-a60f-4477-8988-b9f6d937316c" (UID: "ec73b184-a60f-4477-8988-b9f6d937316c"). InnerVolumeSpecName "kube-api-access-j8m87". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 10:08:00 crc kubenswrapper[4730]: I0930 10:08:00.118443 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ec73b184-a60f-4477-8988-b9f6d937316c-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "ec73b184-a60f-4477-8988-b9f6d937316c" (UID: "ec73b184-a60f-4477-8988-b9f6d937316c"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 10:08:00 crc kubenswrapper[4730]: I0930 10:08:00.121363 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ec73b184-a60f-4477-8988-b9f6d937316c-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "ec73b184-a60f-4477-8988-b9f6d937316c" (UID: "ec73b184-a60f-4477-8988-b9f6d937316c"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 10:08:00 crc kubenswrapper[4730]: I0930 10:08:00.136998 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j8m87\" (UniqueName: \"kubernetes.io/projected/ec73b184-a60f-4477-8988-b9f6d937316c-kube-api-access-j8m87\") on node \"crc\" DevicePath \"\""
Sep 30 10:08:00 crc kubenswrapper[4730]: I0930 10:08:00.137018 4730 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ec73b184-a60f-4477-8988-b9f6d937316c-dns-svc\") on node \"crc\" DevicePath \"\""
Sep 30 10:08:00 crc kubenswrapper[4730]: I0930 10:08:00.137026 4730 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ec73b184-a60f-4477-8988-b9f6d937316c-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Sep 30 10:08:00 crc kubenswrapper[4730]: I0930 10:08:00.137120 4730 generic.go:334] "Generic (PLEG): container finished" podID="ec73b184-a60f-4477-8988-b9f6d937316c" containerID="72eef8489ae81e8440d26031ce68fc853c2bc87d0380c965b9e4fce1b7f0d3dc" exitCode=0
Sep 30 10:08:00 crc kubenswrapper[4730]: I0930 10:08:00.137168 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8469d44f79-gv4w7" event={"ID":"ec73b184-a60f-4477-8988-b9f6d937316c","Type":"ContainerDied","Data":"72eef8489ae81e8440d26031ce68fc853c2bc87d0380c965b9e4fce1b7f0d3dc"}
Sep 30 10:08:00 crc kubenswrapper[4730]: I0930 10:08:00.137194 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8469d44f79-gv4w7" event={"ID":"ec73b184-a60f-4477-8988-b9f6d937316c","Type":"ContainerDied","Data":"c7668521126261df6e12638f93e18d3a2e3839fbfe4c23b5dafe0ddd85824960"}
Sep 30 10:08:00 crc kubenswrapper[4730]: I0930 10:08:00.137210 4730 scope.go:117] "RemoveContainer" containerID="72eef8489ae81e8440d26031ce68fc853c2bc87d0380c965b9e4fce1b7f0d3dc"
Sep 30 10:08:00 crc kubenswrapper[4730]: I0930 10:08:00.137298 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8469d44f79-gv4w7"
Sep 30 10:08:00 crc kubenswrapper[4730]: I0930 10:08:00.145250 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ec73b184-a60f-4477-8988-b9f6d937316c-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "ec73b184-a60f-4477-8988-b9f6d937316c" (UID: "ec73b184-a60f-4477-8988-b9f6d937316c"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 10:08:00 crc kubenswrapper[4730]: I0930 10:08:00.155004 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"522b231d-b240-485c-b52e-fbf2a2fe44bf","Type":"ContainerStarted","Data":"5c459915c72a30167105b239d3121c064572a723a37eb6cd058065e0a964e5fd"}
Sep 30 10:08:00 crc kubenswrapper[4730]: I0930 10:08:00.167661 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ec73b184-a60f-4477-8988-b9f6d937316c-config" (OuterVolumeSpecName: "config") pod "ec73b184-a60f-4477-8988-b9f6d937316c" (UID: "ec73b184-a60f-4477-8988-b9f6d937316c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 10:08:00 crc kubenswrapper[4730]: I0930 10:08:00.227283 4730 scope.go:117] "RemoveContainer" containerID="bf5c05e2975ac0e010a3af6d68dbbbdf5a572e6fe490cb40dd2f9f5dda9d17b9"
Sep 30 10:08:00 crc kubenswrapper[4730]: I0930 10:08:00.239441 4730 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ec73b184-a60f-4477-8988-b9f6d937316c-config\") on node \"crc\" DevicePath \"\""
Sep 30 10:08:00 crc kubenswrapper[4730]: I0930 10:08:00.239473 4730 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ec73b184-a60f-4477-8988-b9f6d937316c-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Sep 30 10:08:00 crc kubenswrapper[4730]: I0930 10:08:00.261792 4730 scope.go:117] "RemoveContainer" containerID="72eef8489ae81e8440d26031ce68fc853c2bc87d0380c965b9e4fce1b7f0d3dc"
Sep 30 10:08:00 crc kubenswrapper[4730]: E0930 10:08:00.266792 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"72eef8489ae81e8440d26031ce68fc853c2bc87d0380c965b9e4fce1b7f0d3dc\": container with ID starting with 72eef8489ae81e8440d26031ce68fc853c2bc87d0380c965b9e4fce1b7f0d3dc not found: ID does not exist" containerID="72eef8489ae81e8440d26031ce68fc853c2bc87d0380c965b9e4fce1b7f0d3dc"
Sep 30 10:08:00 crc kubenswrapper[4730]: I0930 10:08:00.266841 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"72eef8489ae81e8440d26031ce68fc853c2bc87d0380c965b9e4fce1b7f0d3dc"} err="failed to get container status \"72eef8489ae81e8440d26031ce68fc853c2bc87d0380c965b9e4fce1b7f0d3dc\": rpc error: code = NotFound desc = could not find container \"72eef8489ae81e8440d26031ce68fc853c2bc87d0380c965b9e4fce1b7f0d3dc\": container with ID starting with 72eef8489ae81e8440d26031ce68fc853c2bc87d0380c965b9e4fce1b7f0d3dc not found: ID does not exist"
Sep 30 10:08:00 crc kubenswrapper[4730]: I0930 10:08:00.266867 4730 scope.go:117] "RemoveContainer" containerID="bf5c05e2975ac0e010a3af6d68dbbbdf5a572e6fe490cb40dd2f9f5dda9d17b9"
Sep 30 10:08:00 crc kubenswrapper[4730]: E0930 10:08:00.268040 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bf5c05e2975ac0e010a3af6d68dbbbdf5a572e6fe490cb40dd2f9f5dda9d17b9\": container with ID starting with bf5c05e2975ac0e010a3af6d68dbbbdf5a572e6fe490cb40dd2f9f5dda9d17b9 not found: ID does not exist" containerID="bf5c05e2975ac0e010a3af6d68dbbbdf5a572e6fe490cb40dd2f9f5dda9d17b9"
Sep 30 10:08:00 crc kubenswrapper[4730]: I0930 10:08:00.268113 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bf5c05e2975ac0e010a3af6d68dbbbdf5a572e6fe490cb40dd2f9f5dda9d17b9"} err="failed to get container status \"bf5c05e2975ac0e010a3af6d68dbbbdf5a572e6fe490cb40dd2f9f5dda9d17b9\": rpc error: code = NotFound desc = could not find container \"bf5c05e2975ac0e010a3af6d68dbbbdf5a572e6fe490cb40dd2f9f5dda9d17b9\": container with ID starting with bf5c05e2975ac0e010a3af6d68dbbbdf5a572e6fe490cb40dd2f9f5dda9d17b9 not found: ID does not exist"
Sep 30 10:08:00 crc kubenswrapper[4730]: W0930 10:08:00.315844 4730 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1e41c10c_2d54_4df9_bda4_44ae2eba244d.slice/crio-8a27c90ca5297a4ddb443e04f62b6a1557e0ad7b0dc01868fb4426c074743f16 WatchSource:0}: Error finding container 8a27c90ca5297a4ddb443e04f62b6a1557e0ad7b0dc01868fb4426c074743f16: Status 404 returned error can't find the container with id 8a27c90ca5297a4ddb443e04f62b6a1557e0ad7b0dc01868fb4426c074743f16
Sep 30 10:08:00 crc kubenswrapper[4730]: I0930 10:08:00.320543 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-66f5888b75-zf578"]
Sep 30 10:08:00 crc kubenswrapper[4730]: I0930 10:08:00.473552 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8469d44f79-gv4w7"]
Sep 30 10:08:00 crc kubenswrapper[4730]: I0930 10:08:00.482255 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-8469d44f79-gv4w7"]
Sep 30 10:08:01 crc kubenswrapper[4730]: I0930 10:08:01.172524 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"522b231d-b240-485c-b52e-fbf2a2fe44bf","Type":"ContainerStarted","Data":"b08ba61ac80398fe5be6ce46ba02560ddb080e7f76384ef8a1175ddbe761af49"}
Sep 30 10:08:01 crc kubenswrapper[4730]: I0930 10:08:01.172905 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0"
Sep 30 10:08:01 crc kubenswrapper[4730]: I0930 10:08:01.175913 4730 generic.go:334] "Generic (PLEG): container finished" podID="1e41c10c-2d54-4df9-bda4-44ae2eba244d" containerID="b3c141381dfcb9c52caa9056b6c0033b9394c79be915aa6268a991421e3d2f6b" exitCode=0
Sep 30 10:08:01 crc kubenswrapper[4730]: I0930 10:08:01.176042 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-66f5888b75-zf578" event={"ID":"1e41c10c-2d54-4df9-bda4-44ae2eba244d","Type":"ContainerDied","Data":"b3c141381dfcb9c52caa9056b6c0033b9394c79be915aa6268a991421e3d2f6b"}
Sep 30 10:08:01 crc kubenswrapper[4730]: I0930 10:08:01.176100 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-66f5888b75-zf578" event={"ID":"1e41c10c-2d54-4df9-bda4-44ae2eba244d","Type":"ContainerStarted","Data":"8a27c90ca5297a4ddb443e04f62b6a1557e0ad7b0dc01868fb4426c074743f16"}
Sep 30 10:08:01 crc kubenswrapper[4730]: I0930 10:08:01.193948 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/watcher-api-0"
Sep 30 10:08:01 crc kubenswrapper[4730]: I0930 10:08:01.233046 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.351696357 podStartE2EDuration="4.233027345s" podCreationTimestamp="2025-09-30 10:07:57 +0000 UTC" firstStartedPulling="2025-09-30 10:07:57.928670757 +0000 UTC m=+1122.261930750" lastFinishedPulling="2025-09-30 10:08:00.810001745 +0000 UTC m=+1125.143261738" observedRunningTime="2025-09-30 10:08:01.232950133 +0000 UTC m=+1125.566210136" watchObservedRunningTime="2025-09-30 10:08:01.233027345 +0000 UTC m=+1125.566287338"
Sep 30 10:08:01 crc kubenswrapper[4730]: I0930 10:08:01.242041 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/watcher-api-0"
Sep 30 10:08:01 crc kubenswrapper[4730]: I0930 10:08:01.368212 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-f956cddd4-sbtc4"]
Sep 30 10:08:01 crc kubenswrapper[4730]: I0930 10:08:01.727210 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-69f567c75d-8tg9t"
Sep 30 10:08:01 crc kubenswrapper[4730]: I0930 10:08:01.857497 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-69f567c75d-8tg9t"
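The ceilometer-0 startup-latency entry above is internally consistent: E2E is watchObservedRunningTime minus podCreationTimestamp (10:08:01.233027345 - 10:07:57 = 4.233027345s), and the SLO figure equals E2E minus the image-pull window (4.233027345 - 2.881330988 = 1.351696357s). That decomposition is inferred from the arithmetic, not from kubelet documentation; a sketch reproducing it from the logged timestamps:

// sloduration.go — recomputes the tracker's numbers from the log's fields.
package main

import (
	"fmt"
	"time"
)

func main() {
	parse := func(s string) time.Time {
		t, err := time.Parse("2006-01-02 15:04:05.999999999 -0700 MST", s)
		if err != nil {
			panic(err)
		}
		return t
	}
	created := parse("2025-09-30 10:07:57 +0000 UTC")              // podCreationTimestamp
	firstPull := parse("2025-09-30 10:07:57.928670757 +0000 UTC") // firstStartedPulling
	lastPull := parse("2025-09-30 10:08:00.810001745 +0000 UTC")  // lastFinishedPulling
	running := parse("2025-09-30 10:08:01.233027345 +0000 UTC")   // watchObservedRunningTime

	e2e := running.Sub(created)     // 4.233027345s = podStartE2EDuration
	pull := lastPull.Sub(firstPull) // 2.881330988s spent pulling images
	fmt.Println(e2e, e2e-pull)      // 4.233027345s 1.351696357s = podStartSLOduration
}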
Sep 30 10:08:02 crc kubenswrapper[4730]: I0930 10:08:02.079954 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-9cf79fcd5-4nrfn"]
Sep 30 10:08:02 crc kubenswrapper[4730]: E0930 10:08:02.080382 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ec73b184-a60f-4477-8988-b9f6d937316c" containerName="init"
Sep 30 10:08:02 crc kubenswrapper[4730]: I0930 10:08:02.080401 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="ec73b184-a60f-4477-8988-b9f6d937316c" containerName="init"
Sep 30 10:08:02 crc kubenswrapper[4730]: E0930 10:08:02.080461 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ec73b184-a60f-4477-8988-b9f6d937316c" containerName="dnsmasq-dns"
Sep 30 10:08:02 crc kubenswrapper[4730]: I0930 10:08:02.080469 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="ec73b184-a60f-4477-8988-b9f6d937316c" containerName="dnsmasq-dns"
Sep 30 10:08:02 crc kubenswrapper[4730]: I0930 10:08:02.080652 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="ec73b184-a60f-4477-8988-b9f6d937316c" containerName="dnsmasq-dns"
Sep 30 10:08:02 crc kubenswrapper[4730]: I0930 10:08:02.081878 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-9cf79fcd5-4nrfn"
Sep 30 10:08:02 crc kubenswrapper[4730]: I0930 10:08:02.084842 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-internal-svc"
Sep 30 10:08:02 crc kubenswrapper[4730]: I0930 10:08:02.085539 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-public-svc"
Sep 30 10:08:02 crc kubenswrapper[4730]: I0930 10:08:02.094763 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-9cf79fcd5-4nrfn"]
Sep 30 10:08:02 crc kubenswrapper[4730]: I0930 10:08:02.108819 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/2cb5a17e-0f48-4341-9a0e-9c84e63fed3b-ovndb-tls-certs\") pod \"neutron-9cf79fcd5-4nrfn\" (UID: \"2cb5a17e-0f48-4341-9a0e-9c84e63fed3b\") " pod="openstack/neutron-9cf79fcd5-4nrfn"
Sep 30 10:08:02 crc kubenswrapper[4730]: I0930 10:08:02.108868 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/2cb5a17e-0f48-4341-9a0e-9c84e63fed3b-httpd-config\") pod \"neutron-9cf79fcd5-4nrfn\" (UID: \"2cb5a17e-0f48-4341-9a0e-9c84e63fed3b\") " pod="openstack/neutron-9cf79fcd5-4nrfn"
Sep 30 10:08:02 crc kubenswrapper[4730]: I0930 10:08:02.108918 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2cb5a17e-0f48-4341-9a0e-9c84e63fed3b-internal-tls-certs\") pod \"neutron-9cf79fcd5-4nrfn\" (UID: \"2cb5a17e-0f48-4341-9a0e-9c84e63fed3b\") " pod="openstack/neutron-9cf79fcd5-4nrfn"
Sep 30 10:08:02 crc kubenswrapper[4730]: I0930 10:08:02.108941 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tgkxz\" (UniqueName: \"kubernetes.io/projected/2cb5a17e-0f48-4341-9a0e-9c84e63fed3b-kube-api-access-tgkxz\") pod \"neutron-9cf79fcd5-4nrfn\" (UID: \"2cb5a17e-0f48-4341-9a0e-9c84e63fed3b\") " pod="openstack/neutron-9cf79fcd5-4nrfn"
Sep 30 10:08:02 crc kubenswrapper[4730]: I0930 10:08:02.109002 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/2cb5a17e-0f48-4341-9a0e-9c84e63fed3b-config\") pod \"neutron-9cf79fcd5-4nrfn\" (UID: \"2cb5a17e-0f48-4341-9a0e-9c84e63fed3b\") " pod="openstack/neutron-9cf79fcd5-4nrfn"
Sep 30 10:08:02 crc kubenswrapper[4730]: I0930 10:08:02.109029 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2cb5a17e-0f48-4341-9a0e-9c84e63fed3b-combined-ca-bundle\") pod \"neutron-9cf79fcd5-4nrfn\" (UID: \"2cb5a17e-0f48-4341-9a0e-9c84e63fed3b\") " pod="openstack/neutron-9cf79fcd5-4nrfn"
Sep 30 10:08:02 crc kubenswrapper[4730]: I0930 10:08:02.109056 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2cb5a17e-0f48-4341-9a0e-9c84e63fed3b-public-tls-certs\") pod \"neutron-9cf79fcd5-4nrfn\" (UID: \"2cb5a17e-0f48-4341-9a0e-9c84e63fed3b\") " pod="openstack/neutron-9cf79fcd5-4nrfn"
Sep 30 10:08:02 crc kubenswrapper[4730]: I0930 10:08:02.209737 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2cb5a17e-0f48-4341-9a0e-9c84e63fed3b-public-tls-certs\") pod \"neutron-9cf79fcd5-4nrfn\" (UID: \"2cb5a17e-0f48-4341-9a0e-9c84e63fed3b\") " pod="openstack/neutron-9cf79fcd5-4nrfn"
Sep 30 10:08:02 crc kubenswrapper[4730]: I0930 10:08:02.209801 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/2cb5a17e-0f48-4341-9a0e-9c84e63fed3b-ovndb-tls-certs\") pod \"neutron-9cf79fcd5-4nrfn\" (UID: \"2cb5a17e-0f48-4341-9a0e-9c84e63fed3b\") " pod="openstack/neutron-9cf79fcd5-4nrfn"
Sep 30 10:08:02 crc kubenswrapper[4730]: I0930 10:08:02.209844 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/2cb5a17e-0f48-4341-9a0e-9c84e63fed3b-httpd-config\") pod \"neutron-9cf79fcd5-4nrfn\" (UID: \"2cb5a17e-0f48-4341-9a0e-9c84e63fed3b\") " pod="openstack/neutron-9cf79fcd5-4nrfn"
Sep 30 10:08:02 crc kubenswrapper[4730]: I0930 10:08:02.209907 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2cb5a17e-0f48-4341-9a0e-9c84e63fed3b-internal-tls-certs\") pod \"neutron-9cf79fcd5-4nrfn\" (UID: \"2cb5a17e-0f48-4341-9a0e-9c84e63fed3b\") " pod="openstack/neutron-9cf79fcd5-4nrfn"
Sep 30 10:08:02 crc kubenswrapper[4730]: I0930 10:08:02.209939 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tgkxz\" (UniqueName: \"kubernetes.io/projected/2cb5a17e-0f48-4341-9a0e-9c84e63fed3b-kube-api-access-tgkxz\") pod \"neutron-9cf79fcd5-4nrfn\" (UID: \"2cb5a17e-0f48-4341-9a0e-9c84e63fed3b\") " pod="openstack/neutron-9cf79fcd5-4nrfn"
Sep 30 10:08:02 crc kubenswrapper[4730]: I0930 10:08:02.210017 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/2cb5a17e-0f48-4341-9a0e-9c84e63fed3b-config\") pod \"neutron-9cf79fcd5-4nrfn\" (UID: \"2cb5a17e-0f48-4341-9a0e-9c84e63fed3b\") " pod="openstack/neutron-9cf79fcd5-4nrfn"
Sep 30 10:08:02 crc kubenswrapper[4730]: I0930 10:08:02.210052 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2cb5a17e-0f48-4341-9a0e-9c84e63fed3b-combined-ca-bundle\") pod \"neutron-9cf79fcd5-4nrfn\" (UID: \"2cb5a17e-0f48-4341-9a0e-9c84e63fed3b\") " pod="openstack/neutron-9cf79fcd5-4nrfn"
\"neutron-9cf79fcd5-4nrfn\" (UID: \"2cb5a17e-0f48-4341-9a0e-9c84e63fed3b\") " pod="openstack/neutron-9cf79fcd5-4nrfn" Sep 30 10:08:02 crc kubenswrapper[4730]: I0930 10:08:02.216750 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2cb5a17e-0f48-4341-9a0e-9c84e63fed3b-public-tls-certs\") pod \"neutron-9cf79fcd5-4nrfn\" (UID: \"2cb5a17e-0f48-4341-9a0e-9c84e63fed3b\") " pod="openstack/neutron-9cf79fcd5-4nrfn" Sep 30 10:08:02 crc kubenswrapper[4730]: I0930 10:08:02.223339 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-66f5888b75-zf578" event={"ID":"1e41c10c-2d54-4df9-bda4-44ae2eba244d","Type":"ContainerStarted","Data":"5b38ef64b24106bc16f7f756ed4b0b6fa4e16e1ee9911569f8de45ac8222186b"} Sep 30 10:08:02 crc kubenswrapper[4730]: I0930 10:08:02.224540 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2cb5a17e-0f48-4341-9a0e-9c84e63fed3b-combined-ca-bundle\") pod \"neutron-9cf79fcd5-4nrfn\" (UID: \"2cb5a17e-0f48-4341-9a0e-9c84e63fed3b\") " pod="openstack/neutron-9cf79fcd5-4nrfn" Sep 30 10:08:02 crc kubenswrapper[4730]: I0930 10:08:02.224693 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-66f5888b75-zf578" Sep 30 10:08:02 crc kubenswrapper[4730]: I0930 10:08:02.229309 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/2cb5a17e-0f48-4341-9a0e-9c84e63fed3b-httpd-config\") pod \"neutron-9cf79fcd5-4nrfn\" (UID: \"2cb5a17e-0f48-4341-9a0e-9c84e63fed3b\") " pod="openstack/neutron-9cf79fcd5-4nrfn" Sep 30 10:08:02 crc kubenswrapper[4730]: I0930 10:08:02.245268 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/2cb5a17e-0f48-4341-9a0e-9c84e63fed3b-config\") pod \"neutron-9cf79fcd5-4nrfn\" (UID: \"2cb5a17e-0f48-4341-9a0e-9c84e63fed3b\") " pod="openstack/neutron-9cf79fcd5-4nrfn" Sep 30 10:08:02 crc kubenswrapper[4730]: I0930 10:08:02.247264 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/2cb5a17e-0f48-4341-9a0e-9c84e63fed3b-ovndb-tls-certs\") pod \"neutron-9cf79fcd5-4nrfn\" (UID: \"2cb5a17e-0f48-4341-9a0e-9c84e63fed3b\") " pod="openstack/neutron-9cf79fcd5-4nrfn" Sep 30 10:08:02 crc kubenswrapper[4730]: I0930 10:08:02.252491 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-f956cddd4-sbtc4" event={"ID":"bc4d7fc4-7053-4985-831a-093ad13a9f45","Type":"ContainerStarted","Data":"809eeb547f49f15792831ab80ae1ae4ab3eceb9263a950f4d87aaa5556072bf2"} Sep 30 10:08:02 crc kubenswrapper[4730]: I0930 10:08:02.252531 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-f956cddd4-sbtc4" event={"ID":"bc4d7fc4-7053-4985-831a-093ad13a9f45","Type":"ContainerStarted","Data":"d7d350d869da1d66db5b82c41829cc173cb048a4a8c2e2f500f8b1dc0eaecd13"} Sep 30 10:08:02 crc kubenswrapper[4730]: I0930 10:08:02.252541 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-f956cddd4-sbtc4" event={"ID":"bc4d7fc4-7053-4985-831a-093ad13a9f45","Type":"ContainerStarted","Data":"116bdb07cc5d1cc17eb4a98e4aa51ba0f7d9a5d5461d6cf4cdcb647e26df66b6"} Sep 30 10:08:02 crc kubenswrapper[4730]: I0930 10:08:02.252988 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/2cb5a17e-0f48-4341-9a0e-9c84e63fed3b-internal-tls-certs\") pod \"neutron-9cf79fcd5-4nrfn\" (UID: \"2cb5a17e-0f48-4341-9a0e-9c84e63fed3b\") " pod="openstack/neutron-9cf79fcd5-4nrfn" Sep 30 10:08:02 crc kubenswrapper[4730]: I0930 10:08:02.271485 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tgkxz\" (UniqueName: \"kubernetes.io/projected/2cb5a17e-0f48-4341-9a0e-9c84e63fed3b-kube-api-access-tgkxz\") pod \"neutron-9cf79fcd5-4nrfn\" (UID: \"2cb5a17e-0f48-4341-9a0e-9c84e63fed3b\") " pod="openstack/neutron-9cf79fcd5-4nrfn" Sep 30 10:08:02 crc kubenswrapper[4730]: I0930 10:08:02.293527 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/watcher-api-0" Sep 30 10:08:02 crc kubenswrapper[4730]: I0930 10:08:02.306709 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-66f5888b75-zf578" podStartSLOduration=3.3066852239999998 podStartE2EDuration="3.306685224s" podCreationTimestamp="2025-09-30 10:07:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 10:08:02.271392356 +0000 UTC m=+1126.604652359" watchObservedRunningTime="2025-09-30 10:08:02.306685224 +0000 UTC m=+1126.639945217" Sep 30 10:08:02 crc kubenswrapper[4730]: I0930 10:08:02.338065 4730 patch_prober.go:28] interesting pod/machine-config-daemon-d4zf9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 10:08:02 crc kubenswrapper[4730]: I0930 10:08:02.338112 4730 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 10:08:02 crc kubenswrapper[4730]: I0930 10:08:02.338150 4730 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" Sep 30 10:08:02 crc kubenswrapper[4730]: I0930 10:08:02.342523 4730 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"3472c6d9d1cf6cc70effd10d384c0280a404f7b8fcfc840434d206e9db23adc4"} pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 10:08:02 crc kubenswrapper[4730]: I0930 10:08:02.342637 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerName="machine-config-daemon" containerID="cri-o://3472c6d9d1cf6cc70effd10d384c0280a404f7b8fcfc840434d206e9db23adc4" gracePeriod=600 Sep 30 10:08:02 crc kubenswrapper[4730]: I0930 10:08:02.459275 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ec73b184-a60f-4477-8988-b9f6d937316c" path="/var/lib/kubelet/pods/ec73b184-a60f-4477-8988-b9f6d937316c/volumes" Sep 30 10:08:02 crc kubenswrapper[4730]: I0930 10:08:02.466488 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-9cf79fcd5-4nrfn" Sep 30 10:08:02 crc kubenswrapper[4730]: I0930 10:08:02.995516 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/watcher-decision-engine-0" Sep 30 10:08:02 crc kubenswrapper[4730]: I0930 10:08:02.996512 4730 scope.go:117] "RemoveContainer" containerID="e752f863911725e4007eead2b8230aaef061b3efb1cf650c3a8365b075e404e5" Sep 30 10:08:02 crc kubenswrapper[4730]: E0930 10:08:02.996873 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"watcher-decision-engine\" with CrashLoopBackOff: \"back-off 10s restarting failed container=watcher-decision-engine pod=watcher-decision-engine-0_openstack(a4f9bd21-5f86-4443-87be-eadb5d1c77f9)\"" pod="openstack/watcher-decision-engine-0" podUID="a4f9bd21-5f86-4443-87be-eadb5d1c77f9" Sep 30 10:08:03 crc kubenswrapper[4730]: I0930 10:08:03.152794 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-7d48d7c7fd-7l8hx" Sep 30 10:08:03 crc kubenswrapper[4730]: I0930 10:08:03.284141 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-7d48d7c7fd-7l8hx" Sep 30 10:08:03 crc kubenswrapper[4730]: I0930 10:08:03.290027 4730 generic.go:334] "Generic (PLEG): container finished" podID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerID="3472c6d9d1cf6cc70effd10d384c0280a404f7b8fcfc840434d206e9db23adc4" exitCode=0 Sep 30 10:08:03 crc kubenswrapper[4730]: I0930 10:08:03.290698 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" event={"ID":"95bd4436-8399-478d-9552-c9ba5ae8f327","Type":"ContainerDied","Data":"3472c6d9d1cf6cc70effd10d384c0280a404f7b8fcfc840434d206e9db23adc4"} Sep 30 10:08:03 crc kubenswrapper[4730]: I0930 10:08:03.290742 4730 scope.go:117] "RemoveContainer" containerID="900986cbdecf38d2005d5e11f37ce0d1a6c8ab5af66f64b87d1373420d3568ee" Sep 30 10:08:03 crc kubenswrapper[4730]: I0930 10:08:03.292335 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-f956cddd4-sbtc4" Sep 30 10:08:03 crc kubenswrapper[4730]: I0930 10:08:03.437672 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-f956cddd4-sbtc4" podStartSLOduration=4.43764766 podStartE2EDuration="4.43764766s" podCreationTimestamp="2025-09-30 10:07:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 10:08:03.362030457 +0000 UTC m=+1127.695290460" watchObservedRunningTime="2025-09-30 10:08:03.43764766 +0000 UTC m=+1127.770907653" Sep 30 10:08:03 crc kubenswrapper[4730]: I0930 10:08:03.442815 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-9cf79fcd5-4nrfn"] Sep 30 10:08:03 crc kubenswrapper[4730]: W0930 10:08:03.443427 4730 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2cb5a17e_0f48_4341_9a0e_9c84e63fed3b.slice/crio-aa638a15458d31a7e9f218c9caf6866c31019bcb787787b17a7c4f2c3c22cff4 WatchSource:0}: Error finding container aa638a15458d31a7e9f218c9caf6866c31019bcb787787b17a7c4f2c3c22cff4: Status 404 returned error can't find the container with id aa638a15458d31a7e9f218c9caf6866c31019bcb787787b17a7c4f2c3c22cff4 Sep 30 10:08:04 crc kubenswrapper[4730]: I0930 10:08:04.187719 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="ready" pod="openstack/barbican-api-54cc89d54-v6hb5" Sep 30 10:08:04 crc kubenswrapper[4730]: I0930 10:08:04.303243 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-9cf79fcd5-4nrfn" event={"ID":"2cb5a17e-0f48-4341-9a0e-9c84e63fed3b","Type":"ContainerStarted","Data":"aa638a15458d31a7e9f218c9caf6866c31019bcb787787b17a7c4f2c3c22cff4"} Sep 30 10:08:04 crc kubenswrapper[4730]: I0930 10:08:04.475122 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-54cc89d54-v6hb5" Sep 30 10:08:04 crc kubenswrapper[4730]: I0930 10:08:04.525592 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-69f567c75d-8tg9t"] Sep 30 10:08:04 crc kubenswrapper[4730]: I0930 10:08:04.525835 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-69f567c75d-8tg9t" podUID="a5cb84ea-d720-4c98-a515-e28490dd2af4" containerName="barbican-api-log" containerID="cri-o://176c613290be40bf9e30218ba39a996dbcdbd04898e894727efcccf97ebe7fc9" gracePeriod=30 Sep 30 10:08:04 crc kubenswrapper[4730]: I0930 10:08:04.525913 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-69f567c75d-8tg9t" podUID="a5cb84ea-d720-4c98-a515-e28490dd2af4" containerName="barbican-api" containerID="cri-o://cc77bf0faf285f46d37cda0de4d2ec462c83281672fcb1dd85cd84b88861edd3" gracePeriod=30 Sep 30 10:08:04 crc kubenswrapper[4730]: I0930 10:08:04.533949 4730 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-69f567c75d-8tg9t" podUID="a5cb84ea-d720-4c98-a515-e28490dd2af4" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.163:9311/healthcheck\": EOF" Sep 30 10:08:05 crc kubenswrapper[4730]: I0930 10:08:05.330050 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-9cf79fcd5-4nrfn" event={"ID":"2cb5a17e-0f48-4341-9a0e-9c84e63fed3b","Type":"ContainerStarted","Data":"ec1e241bf13ebee8971c052a2fd36eb7817c90753aaff4bb337a7b2e235ce867"} Sep 30 10:08:05 crc kubenswrapper[4730]: I0930 10:08:05.372267 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" event={"ID":"95bd4436-8399-478d-9552-c9ba5ae8f327","Type":"ContainerStarted","Data":"d80d6bf84aad0f9e13029ef1a54a6e376ee3848702f4ba4ce0570e2a35ec8e0c"} Sep 30 10:08:05 crc kubenswrapper[4730]: I0930 10:08:05.387940 4730 generic.go:334] "Generic (PLEG): container finished" podID="a5cb84ea-d720-4c98-a515-e28490dd2af4" containerID="176c613290be40bf9e30218ba39a996dbcdbd04898e894727efcccf97ebe7fc9" exitCode=143 Sep 30 10:08:05 crc kubenswrapper[4730]: I0930 10:08:05.387991 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-69f567c75d-8tg9t" event={"ID":"a5cb84ea-d720-4c98-a515-e28490dd2af4","Type":"ContainerDied","Data":"176c613290be40bf9e30218ba39a996dbcdbd04898e894727efcccf97ebe7fc9"} Sep 30 10:08:06 crc kubenswrapper[4730]: I0930 10:08:06.397537 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-9cf79fcd5-4nrfn" event={"ID":"2cb5a17e-0f48-4341-9a0e-9c84e63fed3b","Type":"ContainerStarted","Data":"b94702a397e4aed80459ba045d0979be58327c2daaf46a9d3b7be065f0e7fd6a"} Sep 30 10:08:06 crc kubenswrapper[4730]: I0930 10:08:06.397971 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-9cf79fcd5-4nrfn" Sep 30 10:08:06 crc kubenswrapper[4730]: I0930 10:08:06.399315 4730 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-wrvww" event={"ID":"488816e9-d4e6-4956-9671-c9de4118821c","Type":"ContainerStarted","Data":"514ef0c0b6f7f016c7be6ed49a18e331ed0e8c99322854a31d779f4ece6ba4f5"} Sep 30 10:08:06 crc kubenswrapper[4730]: I0930 10:08:06.434022 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-9cf79fcd5-4nrfn" podStartSLOduration=4.433996023 podStartE2EDuration="4.433996023s" podCreationTimestamp="2025-09-30 10:08:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 10:08:06.4240436 +0000 UTC m=+1130.757303593" watchObservedRunningTime="2025-09-30 10:08:06.433996023 +0000 UTC m=+1130.767256016" Sep 30 10:08:06 crc kubenswrapper[4730]: I0930 10:08:06.456859 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-sync-wrvww" podStartSLOduration=9.737987377 podStartE2EDuration="42.456834844s" podCreationTimestamp="2025-09-30 10:07:24 +0000 UTC" firstStartedPulling="2025-09-30 10:07:32.429476115 +0000 UTC m=+1096.762736108" lastFinishedPulling="2025-09-30 10:08:05.148323582 +0000 UTC m=+1129.481583575" observedRunningTime="2025-09-30 10:08:06.45392801 +0000 UTC m=+1130.787188003" watchObservedRunningTime="2025-09-30 10:08:06.456834844 +0000 UTC m=+1130.790094837" Sep 30 10:08:07 crc kubenswrapper[4730]: I0930 10:08:07.390136 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-7dfc9d946-psk9c" Sep 30 10:08:07 crc kubenswrapper[4730]: I0930 10:08:07.944627 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-69f567c75d-8tg9t" Sep 30 10:08:08 crc kubenswrapper[4730]: I0930 10:08:08.099534 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a5cb84ea-d720-4c98-a515-e28490dd2af4-config-data\") pod \"a5cb84ea-d720-4c98-a515-e28490dd2af4\" (UID: \"a5cb84ea-d720-4c98-a515-e28490dd2af4\") " Sep 30 10:08:08 crc kubenswrapper[4730]: I0930 10:08:08.101111 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6mfjw\" (UniqueName: \"kubernetes.io/projected/a5cb84ea-d720-4c98-a515-e28490dd2af4-kube-api-access-6mfjw\") pod \"a5cb84ea-d720-4c98-a515-e28490dd2af4\" (UID: \"a5cb84ea-d720-4c98-a515-e28490dd2af4\") " Sep 30 10:08:08 crc kubenswrapper[4730]: I0930 10:08:08.101227 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a5cb84ea-d720-4c98-a515-e28490dd2af4-logs\") pod \"a5cb84ea-d720-4c98-a515-e28490dd2af4\" (UID: \"a5cb84ea-d720-4c98-a515-e28490dd2af4\") " Sep 30 10:08:08 crc kubenswrapper[4730]: I0930 10:08:08.101404 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a5cb84ea-d720-4c98-a515-e28490dd2af4-combined-ca-bundle\") pod \"a5cb84ea-d720-4c98-a515-e28490dd2af4\" (UID: \"a5cb84ea-d720-4c98-a515-e28490dd2af4\") " Sep 30 10:08:08 crc kubenswrapper[4730]: I0930 10:08:08.101654 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a5cb84ea-d720-4c98-a515-e28490dd2af4-logs" (OuterVolumeSpecName: "logs") pod "a5cb84ea-d720-4c98-a515-e28490dd2af4" (UID: "a5cb84ea-d720-4c98-a515-e28490dd2af4"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 10:08:08 crc kubenswrapper[4730]: I0930 10:08:08.102162 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a5cb84ea-d720-4c98-a515-e28490dd2af4-config-data-custom\") pod \"a5cb84ea-d720-4c98-a515-e28490dd2af4\" (UID: \"a5cb84ea-d720-4c98-a515-e28490dd2af4\") " Sep 30 10:08:08 crc kubenswrapper[4730]: I0930 10:08:08.102861 4730 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a5cb84ea-d720-4c98-a515-e28490dd2af4-logs\") on node \"crc\" DevicePath \"\"" Sep 30 10:08:08 crc kubenswrapper[4730]: I0930 10:08:08.109872 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a5cb84ea-d720-4c98-a515-e28490dd2af4-kube-api-access-6mfjw" (OuterVolumeSpecName: "kube-api-access-6mfjw") pod "a5cb84ea-d720-4c98-a515-e28490dd2af4" (UID: "a5cb84ea-d720-4c98-a515-e28490dd2af4"). InnerVolumeSpecName "kube-api-access-6mfjw". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:08:08 crc kubenswrapper[4730]: I0930 10:08:08.114820 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a5cb84ea-d720-4c98-a515-e28490dd2af4-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "a5cb84ea-d720-4c98-a515-e28490dd2af4" (UID: "a5cb84ea-d720-4c98-a515-e28490dd2af4"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:08:08 crc kubenswrapper[4730]: I0930 10:08:08.133933 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a5cb84ea-d720-4c98-a515-e28490dd2af4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a5cb84ea-d720-4c98-a515-e28490dd2af4" (UID: "a5cb84ea-d720-4c98-a515-e28490dd2af4"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:08:08 crc kubenswrapper[4730]: I0930 10:08:08.160653 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a5cb84ea-d720-4c98-a515-e28490dd2af4-config-data" (OuterVolumeSpecName: "config-data") pod "a5cb84ea-d720-4c98-a515-e28490dd2af4" (UID: "a5cb84ea-d720-4c98-a515-e28490dd2af4"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:08:08 crc kubenswrapper[4730]: I0930 10:08:08.204638 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6mfjw\" (UniqueName: \"kubernetes.io/projected/a5cb84ea-d720-4c98-a515-e28490dd2af4-kube-api-access-6mfjw\") on node \"crc\" DevicePath \"\"" Sep 30 10:08:08 crc kubenswrapper[4730]: I0930 10:08:08.204811 4730 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a5cb84ea-d720-4c98-a515-e28490dd2af4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 10:08:08 crc kubenswrapper[4730]: I0930 10:08:08.204866 4730 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a5cb84ea-d720-4c98-a515-e28490dd2af4-config-data-custom\") on node \"crc\" DevicePath \"\"" Sep 30 10:08:08 crc kubenswrapper[4730]: I0930 10:08:08.204948 4730 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a5cb84ea-d720-4c98-a515-e28490dd2af4-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 10:08:08 crc kubenswrapper[4730]: I0930 10:08:08.417304 4730 generic.go:334] "Generic (PLEG): container finished" podID="a5cb84ea-d720-4c98-a515-e28490dd2af4" containerID="cc77bf0faf285f46d37cda0de4d2ec462c83281672fcb1dd85cd84b88861edd3" exitCode=0 Sep 30 10:08:08 crc kubenswrapper[4730]: I0930 10:08:08.417349 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-69f567c75d-8tg9t" event={"ID":"a5cb84ea-d720-4c98-a515-e28490dd2af4","Type":"ContainerDied","Data":"cc77bf0faf285f46d37cda0de4d2ec462c83281672fcb1dd85cd84b88861edd3"} Sep 30 10:08:08 crc kubenswrapper[4730]: I0930 10:08:08.417382 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-69f567c75d-8tg9t" event={"ID":"a5cb84ea-d720-4c98-a515-e28490dd2af4","Type":"ContainerDied","Data":"5a8fae8938d0bdab036da21a87787c0b16a7a8718aeb249531bf43500945dac6"} Sep 30 10:08:08 crc kubenswrapper[4730]: I0930 10:08:08.417356 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-69f567c75d-8tg9t" Sep 30 10:08:08 crc kubenswrapper[4730]: I0930 10:08:08.417403 4730 scope.go:117] "RemoveContainer" containerID="cc77bf0faf285f46d37cda0de4d2ec462c83281672fcb1dd85cd84b88861edd3" Sep 30 10:08:08 crc kubenswrapper[4730]: I0930 10:08:08.444278 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-69f567c75d-8tg9t"] Sep 30 10:08:08 crc kubenswrapper[4730]: I0930 10:08:08.446821 4730 scope.go:117] "RemoveContainer" containerID="176c613290be40bf9e30218ba39a996dbcdbd04898e894727efcccf97ebe7fc9" Sep 30 10:08:08 crc kubenswrapper[4730]: I0930 10:08:08.451532 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-69f567c75d-8tg9t"] Sep 30 10:08:08 crc kubenswrapper[4730]: I0930 10:08:08.469274 4730 scope.go:117] "RemoveContainer" containerID="cc77bf0faf285f46d37cda0de4d2ec462c83281672fcb1dd85cd84b88861edd3" Sep 30 10:08:08 crc kubenswrapper[4730]: E0930 10:08:08.469717 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cc77bf0faf285f46d37cda0de4d2ec462c83281672fcb1dd85cd84b88861edd3\": container with ID starting with cc77bf0faf285f46d37cda0de4d2ec462c83281672fcb1dd85cd84b88861edd3 not found: ID does not exist" containerID="cc77bf0faf285f46d37cda0de4d2ec462c83281672fcb1dd85cd84b88861edd3" Sep 30 10:08:08 crc kubenswrapper[4730]: I0930 10:08:08.469750 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cc77bf0faf285f46d37cda0de4d2ec462c83281672fcb1dd85cd84b88861edd3"} err="failed to get container status \"cc77bf0faf285f46d37cda0de4d2ec462c83281672fcb1dd85cd84b88861edd3\": rpc error: code = NotFound desc = could not find container \"cc77bf0faf285f46d37cda0de4d2ec462c83281672fcb1dd85cd84b88861edd3\": container with ID starting with cc77bf0faf285f46d37cda0de4d2ec462c83281672fcb1dd85cd84b88861edd3 not found: ID does not exist" Sep 30 10:08:08 crc kubenswrapper[4730]: I0930 10:08:08.469772 4730 scope.go:117] "RemoveContainer" containerID="176c613290be40bf9e30218ba39a996dbcdbd04898e894727efcccf97ebe7fc9" Sep 30 10:08:08 crc kubenswrapper[4730]: E0930 10:08:08.469989 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"176c613290be40bf9e30218ba39a996dbcdbd04898e894727efcccf97ebe7fc9\": container with ID starting with 176c613290be40bf9e30218ba39a996dbcdbd04898e894727efcccf97ebe7fc9 not found: ID does not exist" containerID="176c613290be40bf9e30218ba39a996dbcdbd04898e894727efcccf97ebe7fc9" Sep 30 10:08:08 crc kubenswrapper[4730]: I0930 10:08:08.470018 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"176c613290be40bf9e30218ba39a996dbcdbd04898e894727efcccf97ebe7fc9"} err="failed to get container status \"176c613290be40bf9e30218ba39a996dbcdbd04898e894727efcccf97ebe7fc9\": rpc error: code = NotFound desc = could not find container \"176c613290be40bf9e30218ba39a996dbcdbd04898e894727efcccf97ebe7fc9\": container with ID starting with 176c613290be40bf9e30218ba39a996dbcdbd04898e894727efcccf97ebe7fc9 not found: ID does not exist" Sep 30 10:08:08 crc kubenswrapper[4730]: I0930 10:08:08.708840 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Sep 30 10:08:08 crc kubenswrapper[4730]: E0930 10:08:08.709211 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a5cb84ea-d720-4c98-a515-e28490dd2af4" 
containerName="barbican-api" Sep 30 10:08:08 crc kubenswrapper[4730]: I0930 10:08:08.709230 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="a5cb84ea-d720-4c98-a515-e28490dd2af4" containerName="barbican-api" Sep 30 10:08:08 crc kubenswrapper[4730]: E0930 10:08:08.709259 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a5cb84ea-d720-4c98-a515-e28490dd2af4" containerName="barbican-api-log" Sep 30 10:08:08 crc kubenswrapper[4730]: I0930 10:08:08.709266 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="a5cb84ea-d720-4c98-a515-e28490dd2af4" containerName="barbican-api-log" Sep 30 10:08:08 crc kubenswrapper[4730]: I0930 10:08:08.709422 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="a5cb84ea-d720-4c98-a515-e28490dd2af4" containerName="barbican-api-log" Sep 30 10:08:08 crc kubenswrapper[4730]: I0930 10:08:08.709434 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="a5cb84ea-d720-4c98-a515-e28490dd2af4" containerName="barbican-api" Sep 30 10:08:08 crc kubenswrapper[4730]: I0930 10:08:08.710130 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Sep 30 10:08:08 crc kubenswrapper[4730]: I0930 10:08:08.711845 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-6rmlp" Sep 30 10:08:08 crc kubenswrapper[4730]: I0930 10:08:08.712125 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config" Sep 30 10:08:08 crc kubenswrapper[4730]: I0930 10:08:08.714266 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret" Sep 30 10:08:08 crc kubenswrapper[4730]: I0930 10:08:08.722951 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Sep 30 10:08:08 crc kubenswrapper[4730]: I0930 10:08:08.816822 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/7177e538-cc55-44d5-9274-67a54b79f589-openstack-config\") pod \"openstackclient\" (UID: \"7177e538-cc55-44d5-9274-67a54b79f589\") " pod="openstack/openstackclient" Sep 30 10:08:08 crc kubenswrapper[4730]: I0930 10:08:08.816873 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/7177e538-cc55-44d5-9274-67a54b79f589-openstack-config-secret\") pod \"openstackclient\" (UID: \"7177e538-cc55-44d5-9274-67a54b79f589\") " pod="openstack/openstackclient" Sep 30 10:08:08 crc kubenswrapper[4730]: I0930 10:08:08.816910 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7177e538-cc55-44d5-9274-67a54b79f589-combined-ca-bundle\") pod \"openstackclient\" (UID: \"7177e538-cc55-44d5-9274-67a54b79f589\") " pod="openstack/openstackclient" Sep 30 10:08:08 crc kubenswrapper[4730]: I0930 10:08:08.817001 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r45fq\" (UniqueName: \"kubernetes.io/projected/7177e538-cc55-44d5-9274-67a54b79f589-kube-api-access-r45fq\") pod \"openstackclient\" (UID: \"7177e538-cc55-44d5-9274-67a54b79f589\") " pod="openstack/openstackclient" Sep 30 10:08:08 crc kubenswrapper[4730]: I0930 10:08:08.919188 4730 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/7177e538-cc55-44d5-9274-67a54b79f589-openstack-config\") pod \"openstackclient\" (UID: \"7177e538-cc55-44d5-9274-67a54b79f589\") " pod="openstack/openstackclient" Sep 30 10:08:08 crc kubenswrapper[4730]: I0930 10:08:08.919261 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/7177e538-cc55-44d5-9274-67a54b79f589-openstack-config-secret\") pod \"openstackclient\" (UID: \"7177e538-cc55-44d5-9274-67a54b79f589\") " pod="openstack/openstackclient" Sep 30 10:08:08 crc kubenswrapper[4730]: I0930 10:08:08.919295 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7177e538-cc55-44d5-9274-67a54b79f589-combined-ca-bundle\") pod \"openstackclient\" (UID: \"7177e538-cc55-44d5-9274-67a54b79f589\") " pod="openstack/openstackclient" Sep 30 10:08:08 crc kubenswrapper[4730]: I0930 10:08:08.919346 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r45fq\" (UniqueName: \"kubernetes.io/projected/7177e538-cc55-44d5-9274-67a54b79f589-kube-api-access-r45fq\") pod \"openstackclient\" (UID: \"7177e538-cc55-44d5-9274-67a54b79f589\") " pod="openstack/openstackclient" Sep 30 10:08:08 crc kubenswrapper[4730]: I0930 10:08:08.920107 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/7177e538-cc55-44d5-9274-67a54b79f589-openstack-config\") pod \"openstackclient\" (UID: \"7177e538-cc55-44d5-9274-67a54b79f589\") " pod="openstack/openstackclient" Sep 30 10:08:08 crc kubenswrapper[4730]: I0930 10:08:08.924897 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7177e538-cc55-44d5-9274-67a54b79f589-combined-ca-bundle\") pod \"openstackclient\" (UID: \"7177e538-cc55-44d5-9274-67a54b79f589\") " pod="openstack/openstackclient" Sep 30 10:08:08 crc kubenswrapper[4730]: I0930 10:08:08.931435 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/7177e538-cc55-44d5-9274-67a54b79f589-openstack-config-secret\") pod \"openstackclient\" (UID: \"7177e538-cc55-44d5-9274-67a54b79f589\") " pod="openstack/openstackclient" Sep 30 10:08:08 crc kubenswrapper[4730]: I0930 10:08:08.939206 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r45fq\" (UniqueName: \"kubernetes.io/projected/7177e538-cc55-44d5-9274-67a54b79f589-kube-api-access-r45fq\") pod \"openstackclient\" (UID: \"7177e538-cc55-44d5-9274-67a54b79f589\") " pod="openstack/openstackclient" Sep 30 10:08:09 crc kubenswrapper[4730]: I0930 10:08:09.048209 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Sep 30 10:08:09 crc kubenswrapper[4730]: I0930 10:08:09.510286 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Sep 30 10:08:09 crc kubenswrapper[4730]: W0930 10:08:09.514975 4730 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7177e538_cc55_44d5_9274_67a54b79f589.slice/crio-8e70add30ee6a3ee7f6160761145caac79684bffa364e7fe6bd106a6fdbed72f WatchSource:0}: Error finding container 8e70add30ee6a3ee7f6160761145caac79684bffa364e7fe6bd106a6fdbed72f: Status 404 returned error can't find the container with id 8e70add30ee6a3ee7f6160761145caac79684bffa364e7fe6bd106a6fdbed72f Sep 30 10:08:09 crc kubenswrapper[4730]: I0930 10:08:09.763780 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-66f5888b75-zf578" Sep 30 10:08:09 crc kubenswrapper[4730]: I0930 10:08:09.825108 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7d888b67c9-cpbvx"] Sep 30 10:08:09 crc kubenswrapper[4730]: I0930 10:08:09.825376 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-7d888b67c9-cpbvx" podUID="18e54bc5-0acd-437b-bce0-7cb0147c4ab1" containerName="dnsmasq-dns" containerID="cri-o://e68e1c63f375fee1c775a9cd351f8f67166a04f049890483cfa34efa17456e74" gracePeriod=10 Sep 30 10:08:10 crc kubenswrapper[4730]: I0930 10:08:10.394412 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a5cb84ea-d720-4c98-a515-e28490dd2af4" path="/var/lib/kubelet/pods/a5cb84ea-d720-4c98-a515-e28490dd2af4/volumes" Sep 30 10:08:10 crc kubenswrapper[4730]: I0930 10:08:10.446504 4730 generic.go:334] "Generic (PLEG): container finished" podID="18e54bc5-0acd-437b-bce0-7cb0147c4ab1" containerID="e68e1c63f375fee1c775a9cd351f8f67166a04f049890483cfa34efa17456e74" exitCode=0 Sep 30 10:08:10 crc kubenswrapper[4730]: I0930 10:08:10.446579 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7d888b67c9-cpbvx" event={"ID":"18e54bc5-0acd-437b-bce0-7cb0147c4ab1","Type":"ContainerDied","Data":"e68e1c63f375fee1c775a9cd351f8f67166a04f049890483cfa34efa17456e74"} Sep 30 10:08:10 crc kubenswrapper[4730]: I0930 10:08:10.446605 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7d888b67c9-cpbvx" event={"ID":"18e54bc5-0acd-437b-bce0-7cb0147c4ab1","Type":"ContainerDied","Data":"9a1f27061090bae89318daee89b1bbffc830268d7cd79f9358c08b4737c95818"} Sep 30 10:08:10 crc kubenswrapper[4730]: I0930 10:08:10.446633 4730 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9a1f27061090bae89318daee89b1bbffc830268d7cd79f9358c08b4737c95818" Sep 30 10:08:10 crc kubenswrapper[4730]: I0930 10:08:10.453910 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"7177e538-cc55-44d5-9274-67a54b79f589","Type":"ContainerStarted","Data":"8e70add30ee6a3ee7f6160761145caac79684bffa364e7fe6bd106a6fdbed72f"} Sep 30 10:08:10 crc kubenswrapper[4730]: I0930 10:08:10.476469 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7d888b67c9-cpbvx" Sep 30 10:08:10 crc kubenswrapper[4730]: I0930 10:08:10.568682 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/18e54bc5-0acd-437b-bce0-7cb0147c4ab1-dns-svc\") pod \"18e54bc5-0acd-437b-bce0-7cb0147c4ab1\" (UID: \"18e54bc5-0acd-437b-bce0-7cb0147c4ab1\") " Sep 30 10:08:10 crc kubenswrapper[4730]: I0930 10:08:10.568837 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/18e54bc5-0acd-437b-bce0-7cb0147c4ab1-ovsdbserver-sb\") pod \"18e54bc5-0acd-437b-bce0-7cb0147c4ab1\" (UID: \"18e54bc5-0acd-437b-bce0-7cb0147c4ab1\") " Sep 30 10:08:10 crc kubenswrapper[4730]: I0930 10:08:10.568880 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/18e54bc5-0acd-437b-bce0-7cb0147c4ab1-config\") pod \"18e54bc5-0acd-437b-bce0-7cb0147c4ab1\" (UID: \"18e54bc5-0acd-437b-bce0-7cb0147c4ab1\") " Sep 30 10:08:10 crc kubenswrapper[4730]: I0930 10:08:10.568981 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/18e54bc5-0acd-437b-bce0-7cb0147c4ab1-ovsdbserver-nb\") pod \"18e54bc5-0acd-437b-bce0-7cb0147c4ab1\" (UID: \"18e54bc5-0acd-437b-bce0-7cb0147c4ab1\") " Sep 30 10:08:10 crc kubenswrapper[4730]: I0930 10:08:10.569019 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dm4bj\" (UniqueName: \"kubernetes.io/projected/18e54bc5-0acd-437b-bce0-7cb0147c4ab1-kube-api-access-dm4bj\") pod \"18e54bc5-0acd-437b-bce0-7cb0147c4ab1\" (UID: \"18e54bc5-0acd-437b-bce0-7cb0147c4ab1\") " Sep 30 10:08:10 crc kubenswrapper[4730]: I0930 10:08:10.597848 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/18e54bc5-0acd-437b-bce0-7cb0147c4ab1-kube-api-access-dm4bj" (OuterVolumeSpecName: "kube-api-access-dm4bj") pod "18e54bc5-0acd-437b-bce0-7cb0147c4ab1" (UID: "18e54bc5-0acd-437b-bce0-7cb0147c4ab1"). InnerVolumeSpecName "kube-api-access-dm4bj". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:08:10 crc kubenswrapper[4730]: I0930 10:08:10.663337 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/18e54bc5-0acd-437b-bce0-7cb0147c4ab1-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "18e54bc5-0acd-437b-bce0-7cb0147c4ab1" (UID: "18e54bc5-0acd-437b-bce0-7cb0147c4ab1"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 10:08:10 crc kubenswrapper[4730]: I0930 10:08:10.663342 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/18e54bc5-0acd-437b-bce0-7cb0147c4ab1-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "18e54bc5-0acd-437b-bce0-7cb0147c4ab1" (UID: "18e54bc5-0acd-437b-bce0-7cb0147c4ab1"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 10:08:10 crc kubenswrapper[4730]: I0930 10:08:10.675901 4730 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/18e54bc5-0acd-437b-bce0-7cb0147c4ab1-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 30 10:08:10 crc kubenswrapper[4730]: I0930 10:08:10.675933 4730 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/18e54bc5-0acd-437b-bce0-7cb0147c4ab1-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 30 10:08:10 crc kubenswrapper[4730]: I0930 10:08:10.675947 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dm4bj\" (UniqueName: \"kubernetes.io/projected/18e54bc5-0acd-437b-bce0-7cb0147c4ab1-kube-api-access-dm4bj\") on node \"crc\" DevicePath \"\"" Sep 30 10:08:10 crc kubenswrapper[4730]: I0930 10:08:10.711602 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/18e54bc5-0acd-437b-bce0-7cb0147c4ab1-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "18e54bc5-0acd-437b-bce0-7cb0147c4ab1" (UID: "18e54bc5-0acd-437b-bce0-7cb0147c4ab1"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 10:08:10 crc kubenswrapper[4730]: I0930 10:08:10.766174 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/18e54bc5-0acd-437b-bce0-7cb0147c4ab1-config" (OuterVolumeSpecName: "config") pod "18e54bc5-0acd-437b-bce0-7cb0147c4ab1" (UID: "18e54bc5-0acd-437b-bce0-7cb0147c4ab1"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 10:08:10 crc kubenswrapper[4730]: I0930 10:08:10.778832 4730 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/18e54bc5-0acd-437b-bce0-7cb0147c4ab1-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 10:08:10 crc kubenswrapper[4730]: I0930 10:08:10.778888 4730 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/18e54bc5-0acd-437b-bce0-7cb0147c4ab1-config\") on node \"crc\" DevicePath \"\"" Sep 30 10:08:11 crc kubenswrapper[4730]: I0930 10:08:11.461891 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7d888b67c9-cpbvx" Sep 30 10:08:11 crc kubenswrapper[4730]: I0930 10:08:11.495419 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7d888b67c9-cpbvx"] Sep 30 10:08:11 crc kubenswrapper[4730]: I0930 10:08:11.506009 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7d888b67c9-cpbvx"] Sep 30 10:08:12 crc kubenswrapper[4730]: I0930 10:08:12.394366 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="18e54bc5-0acd-437b-bce0-7cb0147c4ab1" path="/var/lib/kubelet/pods/18e54bc5-0acd-437b-bce0-7cb0147c4ab1/volumes" Sep 30 10:08:13 crc kubenswrapper[4730]: I0930 10:08:13.483604 4730 generic.go:334] "Generic (PLEG): container finished" podID="488816e9-d4e6-4956-9671-c9de4118821c" containerID="514ef0c0b6f7f016c7be6ed49a18e331ed0e8c99322854a31d779f4ece6ba4f5" exitCode=0 Sep 30 10:08:13 crc kubenswrapper[4730]: I0930 10:08:13.483745 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-wrvww" event={"ID":"488816e9-d4e6-4956-9671-c9de4118821c","Type":"ContainerDied","Data":"514ef0c0b6f7f016c7be6ed49a18e331ed0e8c99322854a31d779f4ece6ba4f5"} Sep 30 10:08:13 crc kubenswrapper[4730]: I0930 10:08:13.588756 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-db-create-8vbfh"] Sep 30 10:08:13 crc kubenswrapper[4730]: E0930 10:08:13.589117 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="18e54bc5-0acd-437b-bce0-7cb0147c4ab1" containerName="init" Sep 30 10:08:13 crc kubenswrapper[4730]: I0930 10:08:13.589134 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="18e54bc5-0acd-437b-bce0-7cb0147c4ab1" containerName="init" Sep 30 10:08:13 crc kubenswrapper[4730]: E0930 10:08:13.589148 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="18e54bc5-0acd-437b-bce0-7cb0147c4ab1" containerName="dnsmasq-dns" Sep 30 10:08:13 crc kubenswrapper[4730]: I0930 10:08:13.589153 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="18e54bc5-0acd-437b-bce0-7cb0147c4ab1" containerName="dnsmasq-dns" Sep 30 10:08:13 crc kubenswrapper[4730]: I0930 10:08:13.589356 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="18e54bc5-0acd-437b-bce0-7cb0147c4ab1" containerName="dnsmasq-dns" Sep 30 10:08:13 crc kubenswrapper[4730]: I0930 10:08:13.589990 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-8vbfh" Sep 30 10:08:13 crc kubenswrapper[4730]: I0930 10:08:13.607699 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-8vbfh"] Sep 30 10:08:13 crc kubenswrapper[4730]: I0930 10:08:13.679369 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-db-create-svtfd"] Sep 30 10:08:13 crc kubenswrapper[4730]: I0930 10:08:13.680603 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-db-create-svtfd" Sep 30 10:08:13 crc kubenswrapper[4730]: I0930 10:08:13.691445 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-svtfd"] Sep 30 10:08:13 crc kubenswrapper[4730]: I0930 10:08:13.733738 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p5zcs\" (UniqueName: \"kubernetes.io/projected/fee047bc-cc3d-4044-9045-955a116357df-kube-api-access-p5zcs\") pod \"nova-api-db-create-8vbfh\" (UID: \"fee047bc-cc3d-4044-9045-955a116357df\") " pod="openstack/nova-api-db-create-8vbfh" Sep 30 10:08:13 crc kubenswrapper[4730]: I0930 10:08:13.835258 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p5zcs\" (UniqueName: \"kubernetes.io/projected/fee047bc-cc3d-4044-9045-955a116357df-kube-api-access-p5zcs\") pod \"nova-api-db-create-8vbfh\" (UID: \"fee047bc-cc3d-4044-9045-955a116357df\") " pod="openstack/nova-api-db-create-8vbfh" Sep 30 10:08:13 crc kubenswrapper[4730]: I0930 10:08:13.835409 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rrjxq\" (UniqueName: \"kubernetes.io/projected/6750d3c8-c106-4c5a-a591-a243a9135831-kube-api-access-rrjxq\") pod \"nova-cell0-db-create-svtfd\" (UID: \"6750d3c8-c106-4c5a-a591-a243a9135831\") " pod="openstack/nova-cell0-db-create-svtfd" Sep 30 10:08:13 crc kubenswrapper[4730]: I0930 10:08:13.857434 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p5zcs\" (UniqueName: \"kubernetes.io/projected/fee047bc-cc3d-4044-9045-955a116357df-kube-api-access-p5zcs\") pod \"nova-api-db-create-8vbfh\" (UID: \"fee047bc-cc3d-4044-9045-955a116357df\") " pod="openstack/nova-api-db-create-8vbfh" Sep 30 10:08:13 crc kubenswrapper[4730]: I0930 10:08:13.884246 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-db-create-v6ms2"] Sep 30 10:08:13 crc kubenswrapper[4730]: I0930 10:08:13.886568 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-v6ms2" Sep 30 10:08:13 crc kubenswrapper[4730]: I0930 10:08:13.900389 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-v6ms2"] Sep 30 10:08:13 crc kubenswrapper[4730]: I0930 10:08:13.913841 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-8vbfh" Sep 30 10:08:13 crc kubenswrapper[4730]: I0930 10:08:13.937678 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rrjxq\" (UniqueName: \"kubernetes.io/projected/6750d3c8-c106-4c5a-a591-a243a9135831-kube-api-access-rrjxq\") pod \"nova-cell0-db-create-svtfd\" (UID: \"6750d3c8-c106-4c5a-a591-a243a9135831\") " pod="openstack/nova-cell0-db-create-svtfd" Sep 30 10:08:13 crc kubenswrapper[4730]: I0930 10:08:13.968812 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rrjxq\" (UniqueName: \"kubernetes.io/projected/6750d3c8-c106-4c5a-a591-a243a9135831-kube-api-access-rrjxq\") pod \"nova-cell0-db-create-svtfd\" (UID: \"6750d3c8-c106-4c5a-a591-a243a9135831\") " pod="openstack/nova-cell0-db-create-svtfd" Sep 30 10:08:14 crc kubenswrapper[4730]: I0930 10:08:14.002684 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-db-create-svtfd" Sep 30 10:08:14 crc kubenswrapper[4730]: I0930 10:08:14.039564 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kwvmj\" (UniqueName: \"kubernetes.io/projected/a23804b2-1fe7-4b61-8b65-3d4be3166140-kube-api-access-kwvmj\") pod \"nova-cell1-db-create-v6ms2\" (UID: \"a23804b2-1fe7-4b61-8b65-3d4be3166140\") " pod="openstack/nova-cell1-db-create-v6ms2" Sep 30 10:08:14 crc kubenswrapper[4730]: I0930 10:08:14.141822 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kwvmj\" (UniqueName: \"kubernetes.io/projected/a23804b2-1fe7-4b61-8b65-3d4be3166140-kube-api-access-kwvmj\") pod \"nova-cell1-db-create-v6ms2\" (UID: \"a23804b2-1fe7-4b61-8b65-3d4be3166140\") " pod="openstack/nova-cell1-db-create-v6ms2" Sep 30 10:08:14 crc kubenswrapper[4730]: I0930 10:08:14.160983 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kwvmj\" (UniqueName: \"kubernetes.io/projected/a23804b2-1fe7-4b61-8b65-3d4be3166140-kube-api-access-kwvmj\") pod \"nova-cell1-db-create-v6ms2\" (UID: \"a23804b2-1fe7-4b61-8b65-3d4be3166140\") " pod="openstack/nova-cell1-db-create-v6ms2" Sep 30 10:08:14 crc kubenswrapper[4730]: I0930 10:08:14.234220 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-v6ms2" Sep 30 10:08:15 crc kubenswrapper[4730]: I0930 10:08:15.381269 4730 scope.go:117] "RemoveContainer" containerID="e752f863911725e4007eead2b8230aaef061b3efb1cf650c3a8365b075e404e5" Sep 30 10:08:20 crc kubenswrapper[4730]: I0930 10:08:20.486540 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-wrvww" Sep 30 10:08:20 crc kubenswrapper[4730]: I0930 10:08:20.564371 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/488816e9-d4e6-4956-9671-c9de4118821c-combined-ca-bundle\") pod \"488816e9-d4e6-4956-9671-c9de4118821c\" (UID: \"488816e9-d4e6-4956-9671-c9de4118821c\") " Sep 30 10:08:20 crc kubenswrapper[4730]: I0930 10:08:20.564838 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/488816e9-d4e6-4956-9671-c9de4118821c-db-sync-config-data\") pod \"488816e9-d4e6-4956-9671-c9de4118821c\" (UID: \"488816e9-d4e6-4956-9671-c9de4118821c\") " Sep 30 10:08:20 crc kubenswrapper[4730]: I0930 10:08:20.564866 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/488816e9-d4e6-4956-9671-c9de4118821c-config-data\") pod \"488816e9-d4e6-4956-9671-c9de4118821c\" (UID: \"488816e9-d4e6-4956-9671-c9de4118821c\") " Sep 30 10:08:20 crc kubenswrapper[4730]: I0930 10:08:20.564894 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/488816e9-d4e6-4956-9671-c9de4118821c-etc-machine-id\") pod \"488816e9-d4e6-4956-9671-c9de4118821c\" (UID: \"488816e9-d4e6-4956-9671-c9de4118821c\") " Sep 30 10:08:20 crc kubenswrapper[4730]: I0930 10:08:20.564990 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9jsl\" (UniqueName: \"kubernetes.io/projected/488816e9-d4e6-4956-9671-c9de4118821c-kube-api-access-w9jsl\") pod 
\"488816e9-d4e6-4956-9671-c9de4118821c\" (UID: \"488816e9-d4e6-4956-9671-c9de4118821c\") " Sep 30 10:08:20 crc kubenswrapper[4730]: I0930 10:08:20.565033 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/488816e9-d4e6-4956-9671-c9de4118821c-scripts\") pod \"488816e9-d4e6-4956-9671-c9de4118821c\" (UID: \"488816e9-d4e6-4956-9671-c9de4118821c\") " Sep 30 10:08:20 crc kubenswrapper[4730]: I0930 10:08:20.568209 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/488816e9-d4e6-4956-9671-c9de4118821c-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "488816e9-d4e6-4956-9671-c9de4118821c" (UID: "488816e9-d4e6-4956-9671-c9de4118821c"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 10:08:20 crc kubenswrapper[4730]: I0930 10:08:20.573859 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-wrvww" event={"ID":"488816e9-d4e6-4956-9671-c9de4118821c","Type":"ContainerDied","Data":"627b4e4ea5280d050face9e330a8d3540bdc9d8c022f887f4d930951d6380517"} Sep 30 10:08:20 crc kubenswrapper[4730]: I0930 10:08:20.573902 4730 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="627b4e4ea5280d050face9e330a8d3540bdc9d8c022f887f4d930951d6380517" Sep 30 10:08:20 crc kubenswrapper[4730]: I0930 10:08:20.574006 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-wrvww" Sep 30 10:08:20 crc kubenswrapper[4730]: I0930 10:08:20.574981 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/488816e9-d4e6-4956-9671-c9de4118821c-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "488816e9-d4e6-4956-9671-c9de4118821c" (UID: "488816e9-d4e6-4956-9671-c9de4118821c"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:08:20 crc kubenswrapper[4730]: I0930 10:08:20.587227 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/488816e9-d4e6-4956-9671-c9de4118821c-scripts" (OuterVolumeSpecName: "scripts") pod "488816e9-d4e6-4956-9671-c9de4118821c" (UID: "488816e9-d4e6-4956-9671-c9de4118821c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:08:20 crc kubenswrapper[4730]: I0930 10:08:20.587477 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/488816e9-d4e6-4956-9671-c9de4118821c-kube-api-access-w9jsl" (OuterVolumeSpecName: "kube-api-access-w9jsl") pod "488816e9-d4e6-4956-9671-c9de4118821c" (UID: "488816e9-d4e6-4956-9671-c9de4118821c"). InnerVolumeSpecName "kube-api-access-w9jsl". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:08:20 crc kubenswrapper[4730]: I0930 10:08:20.621906 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/488816e9-d4e6-4956-9671-c9de4118821c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "488816e9-d4e6-4956-9671-c9de4118821c" (UID: "488816e9-d4e6-4956-9671-c9de4118821c"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:08:20 crc kubenswrapper[4730]: I0930 10:08:20.664273 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/488816e9-d4e6-4956-9671-c9de4118821c-config-data" (OuterVolumeSpecName: "config-data") pod "488816e9-d4e6-4956-9671-c9de4118821c" (UID: "488816e9-d4e6-4956-9671-c9de4118821c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:08:20 crc kubenswrapper[4730]: I0930 10:08:20.667450 4730 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/488816e9-d4e6-4956-9671-c9de4118821c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 10:08:20 crc kubenswrapper[4730]: I0930 10:08:20.667475 4730 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/488816e9-d4e6-4956-9671-c9de4118821c-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 10:08:20 crc kubenswrapper[4730]: I0930 10:08:20.667484 4730 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/488816e9-d4e6-4956-9671-c9de4118821c-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 10:08:20 crc kubenswrapper[4730]: I0930 10:08:20.667493 4730 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/488816e9-d4e6-4956-9671-c9de4118821c-etc-machine-id\") on node \"crc\" DevicePath \"\"" Sep 30 10:08:20 crc kubenswrapper[4730]: I0930 10:08:20.667501 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9jsl\" (UniqueName: \"kubernetes.io/projected/488816e9-d4e6-4956-9671-c9de4118821c-kube-api-access-w9jsl\") on node \"crc\" DevicePath \"\"" Sep 30 10:08:20 crc kubenswrapper[4730]: I0930 10:08:20.667512 4730 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/488816e9-d4e6-4956-9671-c9de4118821c-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 10:08:20 crc kubenswrapper[4730]: I0930 10:08:20.897042 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-svtfd"] Sep 30 10:08:21 crc kubenswrapper[4730]: W0930 10:08:21.021696 4730 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda23804b2_1fe7_4b61_8b65_3d4be3166140.slice/crio-b6d909d30c028810c53195c3e92cfbf6da9f097bb2859423456ae13c18185833 WatchSource:0}: Error finding container b6d909d30c028810c53195c3e92cfbf6da9f097bb2859423456ae13c18185833: Status 404 returned error can't find the container with id b6d909d30c028810c53195c3e92cfbf6da9f097bb2859423456ae13c18185833 Sep 30 10:08:21 crc kubenswrapper[4730]: W0930 10:08:21.032424 4730 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfee047bc_cc3d_4044_9045_955a116357df.slice/crio-7ab4b1801db8d2c66cba0fb824c5e6f71412539aa9d79382917595f5cfd28007 WatchSource:0}: Error finding container 7ab4b1801db8d2c66cba0fb824c5e6f71412539aa9d79382917595f5cfd28007: Status 404 returned error can't find the container with id 7ab4b1801db8d2c66cba0fb824c5e6f71412539aa9d79382917595f5cfd28007 Sep 30 10:08:21 crc kubenswrapper[4730]: I0930 10:08:21.034200 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-v6ms2"] Sep 30 10:08:21 crc kubenswrapper[4730]: I0930 
10:08:21.053706 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-8vbfh"] Sep 30 10:08:21 crc kubenswrapper[4730]: I0930 10:08:21.589179 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 30 10:08:21 crc kubenswrapper[4730]: I0930 10:08:21.589491 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="522b231d-b240-485c-b52e-fbf2a2fe44bf" containerName="ceilometer-central-agent" containerID="cri-o://00f330622caf2a6afaff47b297f4c11aa71f7e71c60fec3679b1fbf6df90c535" gracePeriod=30 Sep 30 10:08:21 crc kubenswrapper[4730]: I0930 10:08:21.590117 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="522b231d-b240-485c-b52e-fbf2a2fe44bf" containerName="proxy-httpd" containerID="cri-o://b08ba61ac80398fe5be6ce46ba02560ddb080e7f76384ef8a1175ddbe761af49" gracePeriod=30 Sep 30 10:08:21 crc kubenswrapper[4730]: I0930 10:08:21.590303 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="522b231d-b240-485c-b52e-fbf2a2fe44bf" containerName="sg-core" containerID="cri-o://5c459915c72a30167105b239d3121c064572a723a37eb6cd058065e0a964e5fd" gracePeriod=30 Sep 30 10:08:21 crc kubenswrapper[4730]: I0930 10:08:21.590350 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="522b231d-b240-485c-b52e-fbf2a2fe44bf" containerName="ceilometer-notification-agent" containerID="cri-o://96d0060e426320737b668a20b50713e0063ccf78a300b5f83f97b8eacddaf3ee" gracePeriod=30 Sep 30 10:08:21 crc kubenswrapper[4730]: I0930 10:08:21.606907 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Sep 30 10:08:21 crc kubenswrapper[4730]: I0930 10:08:21.608509 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-decision-engine-0" event={"ID":"a4f9bd21-5f86-4443-87be-eadb5d1c77f9","Type":"ContainerStarted","Data":"767915a10d018b40c18d12dc3c1d3b41568181e4b1ea08a89ae112a458629c45"} Sep 30 10:08:21 crc kubenswrapper[4730]: I0930 10:08:21.612304 4730 generic.go:334] "Generic (PLEG): container finished" podID="6750d3c8-c106-4c5a-a591-a243a9135831" containerID="a1bbd2e814128f14d567f74f472517b4f72376037b6c592ab724d24024031e71" exitCode=0 Sep 30 10:08:21 crc kubenswrapper[4730]: I0930 10:08:21.612384 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-svtfd" event={"ID":"6750d3c8-c106-4c5a-a591-a243a9135831","Type":"ContainerDied","Data":"a1bbd2e814128f14d567f74f472517b4f72376037b6c592ab724d24024031e71"} Sep 30 10:08:21 crc kubenswrapper[4730]: I0930 10:08:21.612413 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-svtfd" event={"ID":"6750d3c8-c106-4c5a-a591-a243a9135831","Type":"ContainerStarted","Data":"d6203a835a0b8b78f5e9ad302bc47fe9c502d1cf1a678e07a8d6c0a57f4f88ef"} Sep 30 10:08:21 crc kubenswrapper[4730]: I0930 10:08:21.618876 4730 generic.go:334] "Generic (PLEG): container finished" podID="a23804b2-1fe7-4b61-8b65-3d4be3166140" containerID="d3d23ce695353dea38c25b944bb46bfcc2b5a0c875b22aaf1c965c406e8f8720" exitCode=0 Sep 30 10:08:21 crc kubenswrapper[4730]: I0930 10:08:21.618949 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-v6ms2" 
event={"ID":"a23804b2-1fe7-4b61-8b65-3d4be3166140","Type":"ContainerDied","Data":"d3d23ce695353dea38c25b944bb46bfcc2b5a0c875b22aaf1c965c406e8f8720"} Sep 30 10:08:21 crc kubenswrapper[4730]: I0930 10:08:21.618973 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-v6ms2" event={"ID":"a23804b2-1fe7-4b61-8b65-3d4be3166140","Type":"ContainerStarted","Data":"b6d909d30c028810c53195c3e92cfbf6da9f097bb2859423456ae13c18185833"} Sep 30 10:08:21 crc kubenswrapper[4730]: I0930 10:08:21.620837 4730 generic.go:334] "Generic (PLEG): container finished" podID="fee047bc-cc3d-4044-9045-955a116357df" containerID="8a3ffd5de26ac98419697a858586a7f8735b97fb435bc42d6480d1227d737349" exitCode=0 Sep 30 10:08:21 crc kubenswrapper[4730]: I0930 10:08:21.620882 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-8vbfh" event={"ID":"fee047bc-cc3d-4044-9045-955a116357df","Type":"ContainerDied","Data":"8a3ffd5de26ac98419697a858586a7f8735b97fb435bc42d6480d1227d737349"} Sep 30 10:08:21 crc kubenswrapper[4730]: I0930 10:08:21.620897 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-8vbfh" event={"ID":"fee047bc-cc3d-4044-9045-955a116357df","Type":"ContainerStarted","Data":"7ab4b1801db8d2c66cba0fb824c5e6f71412539aa9d79382917595f5cfd28007"} Sep 30 10:08:21 crc kubenswrapper[4730]: I0930 10:08:21.624179 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"7177e538-cc55-44d5-9274-67a54b79f589","Type":"ContainerStarted","Data":"18e58a2f6f2b0c8ec97bd9e4d78b97f35d60a31d0ce6e2202fdcdd28203707ba"} Sep 30 10:08:21 crc kubenswrapper[4730]: I0930 10:08:21.781665 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Sep 30 10:08:21 crc kubenswrapper[4730]: E0930 10:08:21.782160 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="488816e9-d4e6-4956-9671-c9de4118821c" containerName="cinder-db-sync" Sep 30 10:08:21 crc kubenswrapper[4730]: I0930 10:08:21.782184 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="488816e9-d4e6-4956-9671-c9de4118821c" containerName="cinder-db-sync" Sep 30 10:08:21 crc kubenswrapper[4730]: I0930 10:08:21.782459 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="488816e9-d4e6-4956-9671-c9de4118821c" containerName="cinder-db-sync" Sep 30 10:08:21 crc kubenswrapper[4730]: I0930 10:08:21.783800 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Sep 30 10:08:21 crc kubenswrapper[4730]: I0930 10:08:21.792592 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-w988v" Sep 30 10:08:21 crc kubenswrapper[4730]: I0930 10:08:21.792854 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Sep 30 10:08:21 crc kubenswrapper[4730]: I0930 10:08:21.793029 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Sep 30 10:08:21 crc kubenswrapper[4730]: I0930 10:08:21.793207 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Sep 30 10:08:21 crc kubenswrapper[4730]: I0930 10:08:21.810973 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=2.95392374 podStartE2EDuration="13.810950491s" podCreationTimestamp="2025-09-30 10:08:08 +0000 UTC" firstStartedPulling="2025-09-30 10:08:09.516786846 +0000 UTC m=+1133.850046839" lastFinishedPulling="2025-09-30 10:08:20.373813597 +0000 UTC m=+1144.707073590" observedRunningTime="2025-09-30 10:08:21.759628546 +0000 UTC m=+1146.092888539" watchObservedRunningTime="2025-09-30 10:08:21.810950491 +0000 UTC m=+1146.144210474" Sep 30 10:08:21 crc kubenswrapper[4730]: I0930 10:08:21.863210 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Sep 30 10:08:21 crc kubenswrapper[4730]: I0930 10:08:21.917559 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gwtq4\" (UniqueName: \"kubernetes.io/projected/7df8ec89-cd12-419e-83a2-ca0d04205f41-kube-api-access-gwtq4\") pod \"cinder-scheduler-0\" (UID: \"7df8ec89-cd12-419e-83a2-ca0d04205f41\") " pod="openstack/cinder-scheduler-0" Sep 30 10:08:21 crc kubenswrapper[4730]: I0930 10:08:21.917761 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7df8ec89-cd12-419e-83a2-ca0d04205f41-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"7df8ec89-cd12-419e-83a2-ca0d04205f41\") " pod="openstack/cinder-scheduler-0" Sep 30 10:08:21 crc kubenswrapper[4730]: I0930 10:08:21.917829 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7df8ec89-cd12-419e-83a2-ca0d04205f41-scripts\") pod \"cinder-scheduler-0\" (UID: \"7df8ec89-cd12-419e-83a2-ca0d04205f41\") " pod="openstack/cinder-scheduler-0" Sep 30 10:08:21 crc kubenswrapper[4730]: I0930 10:08:21.920398 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7df8ec89-cd12-419e-83a2-ca0d04205f41-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"7df8ec89-cd12-419e-83a2-ca0d04205f41\") " pod="openstack/cinder-scheduler-0" Sep 30 10:08:21 crc kubenswrapper[4730]: I0930 10:08:21.920531 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7df8ec89-cd12-419e-83a2-ca0d04205f41-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"7df8ec89-cd12-419e-83a2-ca0d04205f41\") " pod="openstack/cinder-scheduler-0" Sep 30 10:08:21 crc kubenswrapper[4730]: I0930 10:08:21.920563 4730 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7df8ec89-cd12-419e-83a2-ca0d04205f41-config-data\") pod \"cinder-scheduler-0\" (UID: \"7df8ec89-cd12-419e-83a2-ca0d04205f41\") " pod="openstack/cinder-scheduler-0" Sep 30 10:08:21 crc kubenswrapper[4730]: I0930 10:08:21.963732 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5c65c577dc-tzdmg"] Sep 30 10:08:21 crc kubenswrapper[4730]: I0930 10:08:21.967176 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c65c577dc-tzdmg" Sep 30 10:08:22 crc kubenswrapper[4730]: I0930 10:08:22.005818 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5c65c577dc-tzdmg"] Sep 30 10:08:22 crc kubenswrapper[4730]: I0930 10:08:22.025060 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7df8ec89-cd12-419e-83a2-ca0d04205f41-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"7df8ec89-cd12-419e-83a2-ca0d04205f41\") " pod="openstack/cinder-scheduler-0" Sep 30 10:08:22 crc kubenswrapper[4730]: I0930 10:08:22.025141 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7df8ec89-cd12-419e-83a2-ca0d04205f41-scripts\") pod \"cinder-scheduler-0\" (UID: \"7df8ec89-cd12-419e-83a2-ca0d04205f41\") " pod="openstack/cinder-scheduler-0" Sep 30 10:08:22 crc kubenswrapper[4730]: I0930 10:08:22.025191 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7df8ec89-cd12-419e-83a2-ca0d04205f41-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"7df8ec89-cd12-419e-83a2-ca0d04205f41\") " pod="openstack/cinder-scheduler-0" Sep 30 10:08:22 crc kubenswrapper[4730]: I0930 10:08:22.025251 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7df8ec89-cd12-419e-83a2-ca0d04205f41-config-data\") pod \"cinder-scheduler-0\" (UID: \"7df8ec89-cd12-419e-83a2-ca0d04205f41\") " pod="openstack/cinder-scheduler-0" Sep 30 10:08:22 crc kubenswrapper[4730]: I0930 10:08:22.025274 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7df8ec89-cd12-419e-83a2-ca0d04205f41-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"7df8ec89-cd12-419e-83a2-ca0d04205f41\") " pod="openstack/cinder-scheduler-0" Sep 30 10:08:22 crc kubenswrapper[4730]: I0930 10:08:22.025341 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gwtq4\" (UniqueName: \"kubernetes.io/projected/7df8ec89-cd12-419e-83a2-ca0d04205f41-kube-api-access-gwtq4\") pod \"cinder-scheduler-0\" (UID: \"7df8ec89-cd12-419e-83a2-ca0d04205f41\") " pod="openstack/cinder-scheduler-0" Sep 30 10:08:22 crc kubenswrapper[4730]: I0930 10:08:22.030837 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7df8ec89-cd12-419e-83a2-ca0d04205f41-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"7df8ec89-cd12-419e-83a2-ca0d04205f41\") " pod="openstack/cinder-scheduler-0" Sep 30 10:08:22 crc kubenswrapper[4730]: I0930 10:08:22.056268 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Sep 30 10:08:22 crc kubenswrapper[4730]: 
I0930 10:08:22.059050 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Sep 30 10:08:22 crc kubenswrapper[4730]: I0930 10:08:22.062474 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7df8ec89-cd12-419e-83a2-ca0d04205f41-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"7df8ec89-cd12-419e-83a2-ca0d04205f41\") " pod="openstack/cinder-scheduler-0" Sep 30 10:08:22 crc kubenswrapper[4730]: I0930 10:08:22.063510 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7df8ec89-cd12-419e-83a2-ca0d04205f41-scripts\") pod \"cinder-scheduler-0\" (UID: \"7df8ec89-cd12-419e-83a2-ca0d04205f41\") " pod="openstack/cinder-scheduler-0" Sep 30 10:08:22 crc kubenswrapper[4730]: I0930 10:08:22.064041 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Sep 30 10:08:22 crc kubenswrapper[4730]: I0930 10:08:22.066641 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7df8ec89-cd12-419e-83a2-ca0d04205f41-config-data\") pod \"cinder-scheduler-0\" (UID: \"7df8ec89-cd12-419e-83a2-ca0d04205f41\") " pod="openstack/cinder-scheduler-0" Sep 30 10:08:22 crc kubenswrapper[4730]: I0930 10:08:22.069324 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7df8ec89-cd12-419e-83a2-ca0d04205f41-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"7df8ec89-cd12-419e-83a2-ca0d04205f41\") " pod="openstack/cinder-scheduler-0" Sep 30 10:08:22 crc kubenswrapper[4730]: I0930 10:08:22.080375 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gwtq4\" (UniqueName: \"kubernetes.io/projected/7df8ec89-cd12-419e-83a2-ca0d04205f41-kube-api-access-gwtq4\") pod \"cinder-scheduler-0\" (UID: \"7df8ec89-cd12-419e-83a2-ca0d04205f41\") " pod="openstack/cinder-scheduler-0" Sep 30 10:08:22 crc kubenswrapper[4730]: I0930 10:08:22.135598 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/47203112-feec-49c6-a377-145258876393-config\") pod \"dnsmasq-dns-5c65c577dc-tzdmg\" (UID: \"47203112-feec-49c6-a377-145258876393\") " pod="openstack/dnsmasq-dns-5c65c577dc-tzdmg" Sep 30 10:08:22 crc kubenswrapper[4730]: I0930 10:08:22.135724 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/47203112-feec-49c6-a377-145258876393-dns-svc\") pod \"dnsmasq-dns-5c65c577dc-tzdmg\" (UID: \"47203112-feec-49c6-a377-145258876393\") " pod="openstack/dnsmasq-dns-5c65c577dc-tzdmg" Sep 30 10:08:22 crc kubenswrapper[4730]: I0930 10:08:22.135765 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gncfx\" (UniqueName: \"kubernetes.io/projected/47203112-feec-49c6-a377-145258876393-kube-api-access-gncfx\") pod \"dnsmasq-dns-5c65c577dc-tzdmg\" (UID: \"47203112-feec-49c6-a377-145258876393\") " pod="openstack/dnsmasq-dns-5c65c577dc-tzdmg" Sep 30 10:08:22 crc kubenswrapper[4730]: I0930 10:08:22.135812 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: 
\"kubernetes.io/configmap/47203112-feec-49c6-a377-145258876393-ovsdbserver-nb\") pod \"dnsmasq-dns-5c65c577dc-tzdmg\" (UID: \"47203112-feec-49c6-a377-145258876393\") " pod="openstack/dnsmasq-dns-5c65c577dc-tzdmg" Sep 30 10:08:22 crc kubenswrapper[4730]: I0930 10:08:22.135841 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/47203112-feec-49c6-a377-145258876393-ovsdbserver-sb\") pod \"dnsmasq-dns-5c65c577dc-tzdmg\" (UID: \"47203112-feec-49c6-a377-145258876393\") " pod="openstack/dnsmasq-dns-5c65c577dc-tzdmg" Sep 30 10:08:22 crc kubenswrapper[4730]: I0930 10:08:22.135952 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Sep 30 10:08:22 crc kubenswrapper[4730]: I0930 10:08:22.150644 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Sep 30 10:08:22 crc kubenswrapper[4730]: I0930 10:08:22.237895 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/880dcb98-9631-48d9-a6d6-715f3ea9d3d8-scripts\") pod \"cinder-api-0\" (UID: \"880dcb98-9631-48d9-a6d6-715f3ea9d3d8\") " pod="openstack/cinder-api-0" Sep 30 10:08:22 crc kubenswrapper[4730]: I0930 10:08:22.237976 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/47203112-feec-49c6-a377-145258876393-config\") pod \"dnsmasq-dns-5c65c577dc-tzdmg\" (UID: \"47203112-feec-49c6-a377-145258876393\") " pod="openstack/dnsmasq-dns-5c65c577dc-tzdmg" Sep 30 10:08:22 crc kubenswrapper[4730]: I0930 10:08:22.238044 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/880dcb98-9631-48d9-a6d6-715f3ea9d3d8-logs\") pod \"cinder-api-0\" (UID: \"880dcb98-9631-48d9-a6d6-715f3ea9d3d8\") " pod="openstack/cinder-api-0" Sep 30 10:08:22 crc kubenswrapper[4730]: I0930 10:08:22.238070 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/880dcb98-9631-48d9-a6d6-715f3ea9d3d8-etc-machine-id\") pod \"cinder-api-0\" (UID: \"880dcb98-9631-48d9-a6d6-715f3ea9d3d8\") " pod="openstack/cinder-api-0" Sep 30 10:08:22 crc kubenswrapper[4730]: I0930 10:08:22.238133 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/47203112-feec-49c6-a377-145258876393-dns-svc\") pod \"dnsmasq-dns-5c65c577dc-tzdmg\" (UID: \"47203112-feec-49c6-a377-145258876393\") " pod="openstack/dnsmasq-dns-5c65c577dc-tzdmg" Sep 30 10:08:22 crc kubenswrapper[4730]: I0930 10:08:22.238187 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gncfx\" (UniqueName: \"kubernetes.io/projected/47203112-feec-49c6-a377-145258876393-kube-api-access-gncfx\") pod \"dnsmasq-dns-5c65c577dc-tzdmg\" (UID: \"47203112-feec-49c6-a377-145258876393\") " pod="openstack/dnsmasq-dns-5c65c577dc-tzdmg" Sep 30 10:08:22 crc kubenswrapper[4730]: I0930 10:08:22.238215 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/880dcb98-9631-48d9-a6d6-715f3ea9d3d8-config-data\") pod \"cinder-api-0\" (UID: \"880dcb98-9631-48d9-a6d6-715f3ea9d3d8\") " pod="openstack/cinder-api-0" 
Sep 30 10:08:22 crc kubenswrapper[4730]: I0930 10:08:22.238244 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/880dcb98-9631-48d9-a6d6-715f3ea9d3d8-config-data-custom\") pod \"cinder-api-0\" (UID: \"880dcb98-9631-48d9-a6d6-715f3ea9d3d8\") " pod="openstack/cinder-api-0" Sep 30 10:08:22 crc kubenswrapper[4730]: I0930 10:08:22.238294 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/47203112-feec-49c6-a377-145258876393-ovsdbserver-nb\") pod \"dnsmasq-dns-5c65c577dc-tzdmg\" (UID: \"47203112-feec-49c6-a377-145258876393\") " pod="openstack/dnsmasq-dns-5c65c577dc-tzdmg" Sep 30 10:08:22 crc kubenswrapper[4730]: I0930 10:08:22.238338 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/47203112-feec-49c6-a377-145258876393-ovsdbserver-sb\") pod \"dnsmasq-dns-5c65c577dc-tzdmg\" (UID: \"47203112-feec-49c6-a377-145258876393\") " pod="openstack/dnsmasq-dns-5c65c577dc-tzdmg" Sep 30 10:08:22 crc kubenswrapper[4730]: I0930 10:08:22.238387 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/880dcb98-9631-48d9-a6d6-715f3ea9d3d8-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"880dcb98-9631-48d9-a6d6-715f3ea9d3d8\") " pod="openstack/cinder-api-0" Sep 30 10:08:22 crc kubenswrapper[4730]: I0930 10:08:22.238417 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9kxwb\" (UniqueName: \"kubernetes.io/projected/880dcb98-9631-48d9-a6d6-715f3ea9d3d8-kube-api-access-9kxwb\") pod \"cinder-api-0\" (UID: \"880dcb98-9631-48d9-a6d6-715f3ea9d3d8\") " pod="openstack/cinder-api-0" Sep 30 10:08:22 crc kubenswrapper[4730]: I0930 10:08:22.239418 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/47203112-feec-49c6-a377-145258876393-config\") pod \"dnsmasq-dns-5c65c577dc-tzdmg\" (UID: \"47203112-feec-49c6-a377-145258876393\") " pod="openstack/dnsmasq-dns-5c65c577dc-tzdmg" Sep 30 10:08:22 crc kubenswrapper[4730]: I0930 10:08:22.240082 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/47203112-feec-49c6-a377-145258876393-dns-svc\") pod \"dnsmasq-dns-5c65c577dc-tzdmg\" (UID: \"47203112-feec-49c6-a377-145258876393\") " pod="openstack/dnsmasq-dns-5c65c577dc-tzdmg" Sep 30 10:08:22 crc kubenswrapper[4730]: I0930 10:08:22.242107 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/47203112-feec-49c6-a377-145258876393-ovsdbserver-nb\") pod \"dnsmasq-dns-5c65c577dc-tzdmg\" (UID: \"47203112-feec-49c6-a377-145258876393\") " pod="openstack/dnsmasq-dns-5c65c577dc-tzdmg" Sep 30 10:08:22 crc kubenswrapper[4730]: I0930 10:08:22.242781 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/47203112-feec-49c6-a377-145258876393-ovsdbserver-sb\") pod \"dnsmasq-dns-5c65c577dc-tzdmg\" (UID: \"47203112-feec-49c6-a377-145258876393\") " pod="openstack/dnsmasq-dns-5c65c577dc-tzdmg" Sep 30 10:08:22 crc kubenswrapper[4730]: I0930 10:08:22.287551 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"kube-api-access-gncfx\" (UniqueName: \"kubernetes.io/projected/47203112-feec-49c6-a377-145258876393-kube-api-access-gncfx\") pod \"dnsmasq-dns-5c65c577dc-tzdmg\" (UID: \"47203112-feec-49c6-a377-145258876393\") " pod="openstack/dnsmasq-dns-5c65c577dc-tzdmg" Sep 30 10:08:22 crc kubenswrapper[4730]: I0930 10:08:22.314080 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c65c577dc-tzdmg" Sep 30 10:08:22 crc kubenswrapper[4730]: I0930 10:08:22.343789 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/880dcb98-9631-48d9-a6d6-715f3ea9d3d8-config-data\") pod \"cinder-api-0\" (UID: \"880dcb98-9631-48d9-a6d6-715f3ea9d3d8\") " pod="openstack/cinder-api-0" Sep 30 10:08:22 crc kubenswrapper[4730]: I0930 10:08:22.343842 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/880dcb98-9631-48d9-a6d6-715f3ea9d3d8-config-data-custom\") pod \"cinder-api-0\" (UID: \"880dcb98-9631-48d9-a6d6-715f3ea9d3d8\") " pod="openstack/cinder-api-0" Sep 30 10:08:22 crc kubenswrapper[4730]: I0930 10:08:22.343975 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/880dcb98-9631-48d9-a6d6-715f3ea9d3d8-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"880dcb98-9631-48d9-a6d6-715f3ea9d3d8\") " pod="openstack/cinder-api-0" Sep 30 10:08:22 crc kubenswrapper[4730]: I0930 10:08:22.344012 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9kxwb\" (UniqueName: \"kubernetes.io/projected/880dcb98-9631-48d9-a6d6-715f3ea9d3d8-kube-api-access-9kxwb\") pod \"cinder-api-0\" (UID: \"880dcb98-9631-48d9-a6d6-715f3ea9d3d8\") " pod="openstack/cinder-api-0" Sep 30 10:08:22 crc kubenswrapper[4730]: I0930 10:08:22.344039 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/880dcb98-9631-48d9-a6d6-715f3ea9d3d8-scripts\") pod \"cinder-api-0\" (UID: \"880dcb98-9631-48d9-a6d6-715f3ea9d3d8\") " pod="openstack/cinder-api-0" Sep 30 10:08:22 crc kubenswrapper[4730]: I0930 10:08:22.344100 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/880dcb98-9631-48d9-a6d6-715f3ea9d3d8-logs\") pod \"cinder-api-0\" (UID: \"880dcb98-9631-48d9-a6d6-715f3ea9d3d8\") " pod="openstack/cinder-api-0" Sep 30 10:08:22 crc kubenswrapper[4730]: I0930 10:08:22.344123 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/880dcb98-9631-48d9-a6d6-715f3ea9d3d8-etc-machine-id\") pod \"cinder-api-0\" (UID: \"880dcb98-9631-48d9-a6d6-715f3ea9d3d8\") " pod="openstack/cinder-api-0" Sep 30 10:08:22 crc kubenswrapper[4730]: I0930 10:08:22.344279 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/880dcb98-9631-48d9-a6d6-715f3ea9d3d8-etc-machine-id\") pod \"cinder-api-0\" (UID: \"880dcb98-9631-48d9-a6d6-715f3ea9d3d8\") " pod="openstack/cinder-api-0" Sep 30 10:08:22 crc kubenswrapper[4730]: I0930 10:08:22.359081 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/880dcb98-9631-48d9-a6d6-715f3ea9d3d8-logs\") pod \"cinder-api-0\" (UID: 
\"880dcb98-9631-48d9-a6d6-715f3ea9d3d8\") " pod="openstack/cinder-api-0" Sep 30 10:08:22 crc kubenswrapper[4730]: I0930 10:08:22.360327 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/880dcb98-9631-48d9-a6d6-715f3ea9d3d8-config-data\") pod \"cinder-api-0\" (UID: \"880dcb98-9631-48d9-a6d6-715f3ea9d3d8\") " pod="openstack/cinder-api-0" Sep 30 10:08:22 crc kubenswrapper[4730]: I0930 10:08:22.369677 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/880dcb98-9631-48d9-a6d6-715f3ea9d3d8-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"880dcb98-9631-48d9-a6d6-715f3ea9d3d8\") " pod="openstack/cinder-api-0" Sep 30 10:08:22 crc kubenswrapper[4730]: I0930 10:08:22.370011 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/880dcb98-9631-48d9-a6d6-715f3ea9d3d8-scripts\") pod \"cinder-api-0\" (UID: \"880dcb98-9631-48d9-a6d6-715f3ea9d3d8\") " pod="openstack/cinder-api-0" Sep 30 10:08:22 crc kubenswrapper[4730]: I0930 10:08:22.375168 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/880dcb98-9631-48d9-a6d6-715f3ea9d3d8-config-data-custom\") pod \"cinder-api-0\" (UID: \"880dcb98-9631-48d9-a6d6-715f3ea9d3d8\") " pod="openstack/cinder-api-0" Sep 30 10:08:22 crc kubenswrapper[4730]: I0930 10:08:22.386479 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9kxwb\" (UniqueName: \"kubernetes.io/projected/880dcb98-9631-48d9-a6d6-715f3ea9d3d8-kube-api-access-9kxwb\") pod \"cinder-api-0\" (UID: \"880dcb98-9631-48d9-a6d6-715f3ea9d3d8\") " pod="openstack/cinder-api-0" Sep 30 10:08:22 crc kubenswrapper[4730]: I0930 10:08:22.467476 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Sep 30 10:08:22 crc kubenswrapper[4730]: I0930 10:08:22.677120 4730 generic.go:334] "Generic (PLEG): container finished" podID="522b231d-b240-485c-b52e-fbf2a2fe44bf" containerID="b08ba61ac80398fe5be6ce46ba02560ddb080e7f76384ef8a1175ddbe761af49" exitCode=0 Sep 30 10:08:22 crc kubenswrapper[4730]: I0930 10:08:22.677376 4730 generic.go:334] "Generic (PLEG): container finished" podID="522b231d-b240-485c-b52e-fbf2a2fe44bf" containerID="5c459915c72a30167105b239d3121c064572a723a37eb6cd058065e0a964e5fd" exitCode=2 Sep 30 10:08:22 crc kubenswrapper[4730]: I0930 10:08:22.677528 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"522b231d-b240-485c-b52e-fbf2a2fe44bf","Type":"ContainerDied","Data":"b08ba61ac80398fe5be6ce46ba02560ddb080e7f76384ef8a1175ddbe761af49"} Sep 30 10:08:22 crc kubenswrapper[4730]: I0930 10:08:22.677558 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"522b231d-b240-485c-b52e-fbf2a2fe44bf","Type":"ContainerDied","Data":"5c459915c72a30167105b239d3121c064572a723a37eb6cd058065e0a964e5fd"} Sep 30 10:08:22 crc kubenswrapper[4730]: I0930 10:08:22.936199 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Sep 30 10:08:22 crc kubenswrapper[4730]: I0930 10:08:22.994562 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/watcher-decision-engine-0" Sep 30 10:08:22 crc kubenswrapper[4730]: I0930 10:08:22.994970 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/watcher-decision-engine-0" Sep 30 10:08:23 crc kubenswrapper[4730]: I0930 10:08:23.034289 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/watcher-decision-engine-0" Sep 30 10:08:23 crc kubenswrapper[4730]: I0930 10:08:23.585001 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5c65c577dc-tzdmg"] Sep 30 10:08:23 crc kubenswrapper[4730]: I0930 10:08:23.662506 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-v6ms2" Sep 30 10:08:23 crc kubenswrapper[4730]: I0930 10:08:23.684437 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-svtfd" Sep 30 10:08:23 crc kubenswrapper[4730]: I0930 10:08:23.691674 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-8vbfh" Sep 30 10:08:23 crc kubenswrapper[4730]: I0930 10:08:23.705555 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Sep 30 10:08:23 crc kubenswrapper[4730]: I0930 10:08:23.755851 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"7df8ec89-cd12-419e-83a2-ca0d04205f41","Type":"ContainerStarted","Data":"a284e9a2d69cfbf635980e37d0e7c57c7f758b0b6240a1a5203c6eea32c3a953"} Sep 30 10:08:23 crc kubenswrapper[4730]: I0930 10:08:23.767278 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c65c577dc-tzdmg" event={"ID":"47203112-feec-49c6-a377-145258876393","Type":"ContainerStarted","Data":"8224e587aafa27e2dce4e0fc0c1fd13467263849a015778394b42ed6033ed682"} Sep 30 10:08:23 crc kubenswrapper[4730]: I0930 10:08:23.794789 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-svtfd" event={"ID":"6750d3c8-c106-4c5a-a591-a243a9135831","Type":"ContainerDied","Data":"d6203a835a0b8b78f5e9ad302bc47fe9c502d1cf1a678e07a8d6c0a57f4f88ef"} Sep 30 10:08:23 crc kubenswrapper[4730]: I0930 10:08:23.794823 4730 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d6203a835a0b8b78f5e9ad302bc47fe9c502d1cf1a678e07a8d6c0a57f4f88ef" Sep 30 10:08:23 crc kubenswrapper[4730]: I0930 10:08:23.795089 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-svtfd" Sep 30 10:08:23 crc kubenswrapper[4730]: I0930 10:08:23.805159 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-v6ms2" event={"ID":"a23804b2-1fe7-4b61-8b65-3d4be3166140","Type":"ContainerDied","Data":"b6d909d30c028810c53195c3e92cfbf6da9f097bb2859423456ae13c18185833"} Sep 30 10:08:23 crc kubenswrapper[4730]: I0930 10:08:23.805208 4730 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b6d909d30c028810c53195c3e92cfbf6da9f097bb2859423456ae13c18185833" Sep 30 10:08:23 crc kubenswrapper[4730]: I0930 10:08:23.805280 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-v6ms2" Sep 30 10:08:23 crc kubenswrapper[4730]: I0930 10:08:23.812101 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rrjxq\" (UniqueName: \"kubernetes.io/projected/6750d3c8-c106-4c5a-a591-a243a9135831-kube-api-access-rrjxq\") pod \"6750d3c8-c106-4c5a-a591-a243a9135831\" (UID: \"6750d3c8-c106-4c5a-a591-a243a9135831\") " Sep 30 10:08:23 crc kubenswrapper[4730]: I0930 10:08:23.812226 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p5zcs\" (UniqueName: \"kubernetes.io/projected/fee047bc-cc3d-4044-9045-955a116357df-kube-api-access-p5zcs\") pod \"fee047bc-cc3d-4044-9045-955a116357df\" (UID: \"fee047bc-cc3d-4044-9045-955a116357df\") " Sep 30 10:08:23 crc kubenswrapper[4730]: I0930 10:08:23.812298 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kwvmj\" (UniqueName: \"kubernetes.io/projected/a23804b2-1fe7-4b61-8b65-3d4be3166140-kube-api-access-kwvmj\") pod \"a23804b2-1fe7-4b61-8b65-3d4be3166140\" (UID: \"a23804b2-1fe7-4b61-8b65-3d4be3166140\") " Sep 30 10:08:23 crc kubenswrapper[4730]: I0930 10:08:23.819144 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-8vbfh" Sep 30 10:08:23 crc kubenswrapper[4730]: I0930 10:08:23.819257 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-8vbfh" event={"ID":"fee047bc-cc3d-4044-9045-955a116357df","Type":"ContainerDied","Data":"7ab4b1801db8d2c66cba0fb824c5e6f71412539aa9d79382917595f5cfd28007"} Sep 30 10:08:23 crc kubenswrapper[4730]: I0930 10:08:23.819281 4730 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7ab4b1801db8d2c66cba0fb824c5e6f71412539aa9d79382917595f5cfd28007" Sep 30 10:08:23 crc kubenswrapper[4730]: I0930 10:08:23.826881 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6750d3c8-c106-4c5a-a591-a243a9135831-kube-api-access-rrjxq" (OuterVolumeSpecName: "kube-api-access-rrjxq") pod "6750d3c8-c106-4c5a-a591-a243a9135831" (UID: "6750d3c8-c106-4c5a-a591-a243a9135831"). InnerVolumeSpecName "kube-api-access-rrjxq". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:08:23 crc kubenswrapper[4730]: I0930 10:08:23.827001 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fee047bc-cc3d-4044-9045-955a116357df-kube-api-access-p5zcs" (OuterVolumeSpecName: "kube-api-access-p5zcs") pod "fee047bc-cc3d-4044-9045-955a116357df" (UID: "fee047bc-cc3d-4044-9045-955a116357df"). InnerVolumeSpecName "kube-api-access-p5zcs". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:08:23 crc kubenswrapper[4730]: I0930 10:08:23.827410 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a23804b2-1fe7-4b61-8b65-3d4be3166140-kube-api-access-kwvmj" (OuterVolumeSpecName: "kube-api-access-kwvmj") pod "a23804b2-1fe7-4b61-8b65-3d4be3166140" (UID: "a23804b2-1fe7-4b61-8b65-3d4be3166140"). InnerVolumeSpecName "kube-api-access-kwvmj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:08:23 crc kubenswrapper[4730]: I0930 10:08:23.831781 4730 generic.go:334] "Generic (PLEG): container finished" podID="522b231d-b240-485c-b52e-fbf2a2fe44bf" containerID="00f330622caf2a6afaff47b297f4c11aa71f7e71c60fec3679b1fbf6df90c535" exitCode=0 Sep 30 10:08:23 crc kubenswrapper[4730]: I0930 10:08:23.832349 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"522b231d-b240-485c-b52e-fbf2a2fe44bf","Type":"ContainerDied","Data":"00f330622caf2a6afaff47b297f4c11aa71f7e71c60fec3679b1fbf6df90c535"} Sep 30 10:08:23 crc kubenswrapper[4730]: I0930 10:08:23.923222 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rrjxq\" (UniqueName: \"kubernetes.io/projected/6750d3c8-c106-4c5a-a591-a243a9135831-kube-api-access-rrjxq\") on node \"crc\" DevicePath \"\"" Sep 30 10:08:23 crc kubenswrapper[4730]: I0930 10:08:23.923254 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p5zcs\" (UniqueName: \"kubernetes.io/projected/fee047bc-cc3d-4044-9045-955a116357df-kube-api-access-p5zcs\") on node \"crc\" DevicePath \"\"" Sep 30 10:08:23 crc kubenswrapper[4730]: I0930 10:08:23.923264 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kwvmj\" (UniqueName: \"kubernetes.io/projected/a23804b2-1fe7-4b61-8b65-3d4be3166140-kube-api-access-kwvmj\") on node \"crc\" DevicePath \"\"" Sep 30 10:08:23 crc kubenswrapper[4730]: I0930 10:08:23.926245 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/watcher-decision-engine-0" Sep 30 10:08:24 crc kubenswrapper[4730]: I0930 10:08:24.403300 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Sep 30 10:08:24 crc kubenswrapper[4730]: I0930 10:08:24.844113 4730 generic.go:334] "Generic (PLEG): container finished" podID="47203112-feec-49c6-a377-145258876393" containerID="44f32d504f1491302cc30c6ff9346c69ceca8868628da7b3c9488db8e47d1b35" exitCode=0 Sep 30 10:08:24 crc kubenswrapper[4730]: I0930 10:08:24.844500 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c65c577dc-tzdmg" event={"ID":"47203112-feec-49c6-a377-145258876393","Type":"ContainerDied","Data":"44f32d504f1491302cc30c6ff9346c69ceca8868628da7b3c9488db8e47d1b35"} Sep 30 10:08:24 crc kubenswrapper[4730]: I0930 10:08:24.849720 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"880dcb98-9631-48d9-a6d6-715f3ea9d3d8","Type":"ContainerStarted","Data":"b4cadf1c05df5797545320b4e669cae97251e7b2d46334f24794895b4b162206"} Sep 30 10:08:25 crc kubenswrapper[4730]: I0930 10:08:25.861131 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"880dcb98-9631-48d9-a6d6-715f3ea9d3d8","Type":"ContainerStarted","Data":"68cbbcabe9c7818668e040d8940320e47942aa440291c156d1049ca79c1074c8"} Sep 30 10:08:25 crc kubenswrapper[4730]: I0930 10:08:25.863764 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"7df8ec89-cd12-419e-83a2-ca0d04205f41","Type":"ContainerStarted","Data":"6b24b471a3235e8d01a54393e74a1e7397d0deab97d3c165bd9fdc5d5aa38e4c"} Sep 30 10:08:26 crc kubenswrapper[4730]: I0930 10:08:26.895307 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c65c577dc-tzdmg" 
event={"ID":"47203112-feec-49c6-a377-145258876393","Type":"ContainerStarted","Data":"c8d23dcdc699d9f08de7751fd49ce06b7faf82a17e559868455cf1297fd73a1e"} Sep 30 10:08:26 crc kubenswrapper[4730]: I0930 10:08:26.896456 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5c65c577dc-tzdmg" Sep 30 10:08:26 crc kubenswrapper[4730]: I0930 10:08:26.909877 4730 generic.go:334] "Generic (PLEG): container finished" podID="522b231d-b240-485c-b52e-fbf2a2fe44bf" containerID="96d0060e426320737b668a20b50713e0063ccf78a300b5f83f97b8eacddaf3ee" exitCode=0 Sep 30 10:08:26 crc kubenswrapper[4730]: I0930 10:08:26.909962 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"522b231d-b240-485c-b52e-fbf2a2fe44bf","Type":"ContainerDied","Data":"96d0060e426320737b668a20b50713e0063ccf78a300b5f83f97b8eacddaf3ee"} Sep 30 10:08:26 crc kubenswrapper[4730]: I0930 10:08:26.919553 4730 generic.go:334] "Generic (PLEG): container finished" podID="a4f9bd21-5f86-4443-87be-eadb5d1c77f9" containerID="767915a10d018b40c18d12dc3c1d3b41568181e4b1ea08a89ae112a458629c45" exitCode=1 Sep 30 10:08:26 crc kubenswrapper[4730]: I0930 10:08:26.919639 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-decision-engine-0" event={"ID":"a4f9bd21-5f86-4443-87be-eadb5d1c77f9","Type":"ContainerDied","Data":"767915a10d018b40c18d12dc3c1d3b41568181e4b1ea08a89ae112a458629c45"} Sep 30 10:08:26 crc kubenswrapper[4730]: I0930 10:08:26.919695 4730 scope.go:117] "RemoveContainer" containerID="e752f863911725e4007eead2b8230aaef061b3efb1cf650c3a8365b075e404e5" Sep 30 10:08:26 crc kubenswrapper[4730]: I0930 10:08:26.920317 4730 scope.go:117] "RemoveContainer" containerID="767915a10d018b40c18d12dc3c1d3b41568181e4b1ea08a89ae112a458629c45" Sep 30 10:08:26 crc kubenswrapper[4730]: E0930 10:08:26.920580 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"watcher-decision-engine\" with CrashLoopBackOff: \"back-off 20s restarting failed container=watcher-decision-engine pod=watcher-decision-engine-0_openstack(a4f9bd21-5f86-4443-87be-eadb5d1c77f9)\"" pod="openstack/watcher-decision-engine-0" podUID="a4f9bd21-5f86-4443-87be-eadb5d1c77f9" Sep 30 10:08:26 crc kubenswrapper[4730]: I0930 10:08:26.939717 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"7df8ec89-cd12-419e-83a2-ca0d04205f41","Type":"ContainerStarted","Data":"0b2b9d608b6600aa9fc53e605ebac1990a1916ee85de3e98c9592e5d2e3c6786"} Sep 30 10:08:26 crc kubenswrapper[4730]: I0930 10:08:26.971322 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5c65c577dc-tzdmg" podStartSLOduration=5.971301547 podStartE2EDuration="5.971301547s" podCreationTimestamp="2025-09-30 10:08:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 10:08:26.918596836 +0000 UTC m=+1151.251856829" watchObservedRunningTime="2025-09-30 10:08:26.971301547 +0000 UTC m=+1151.304561540" Sep 30 10:08:27 crc kubenswrapper[4730]: I0930 10:08:27.008324 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=5.550984515 podStartE2EDuration="6.008299387s" podCreationTimestamp="2025-09-30 10:08:21 +0000 UTC" firstStartedPulling="2025-09-30 10:08:22.955241706 +0000 UTC m=+1147.288501689" lastFinishedPulling="2025-09-30 10:08:23.412556568 +0000 UTC 
m=+1147.745816561" observedRunningTime="2025-09-30 10:08:26.989790447 +0000 UTC m=+1151.323050440" watchObservedRunningTime="2025-09-30 10:08:27.008299387 +0000 UTC m=+1151.341559380" Sep 30 10:08:27 crc kubenswrapper[4730]: I0930 10:08:27.151809 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Sep 30 10:08:27 crc kubenswrapper[4730]: I0930 10:08:27.436956 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 10:08:27 crc kubenswrapper[4730]: I0930 10:08:27.513876 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/522b231d-b240-485c-b52e-fbf2a2fe44bf-run-httpd\") pod \"522b231d-b240-485c-b52e-fbf2a2fe44bf\" (UID: \"522b231d-b240-485c-b52e-fbf2a2fe44bf\") " Sep 30 10:08:27 crc kubenswrapper[4730]: I0930 10:08:27.514540 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/522b231d-b240-485c-b52e-fbf2a2fe44bf-config-data\") pod \"522b231d-b240-485c-b52e-fbf2a2fe44bf\" (UID: \"522b231d-b240-485c-b52e-fbf2a2fe44bf\") " Sep 30 10:08:27 crc kubenswrapper[4730]: I0930 10:08:27.514723 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/522b231d-b240-485c-b52e-fbf2a2fe44bf-sg-core-conf-yaml\") pod \"522b231d-b240-485c-b52e-fbf2a2fe44bf\" (UID: \"522b231d-b240-485c-b52e-fbf2a2fe44bf\") " Sep 30 10:08:27 crc kubenswrapper[4730]: I0930 10:08:27.514867 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sjx9d\" (UniqueName: \"kubernetes.io/projected/522b231d-b240-485c-b52e-fbf2a2fe44bf-kube-api-access-sjx9d\") pod \"522b231d-b240-485c-b52e-fbf2a2fe44bf\" (UID: \"522b231d-b240-485c-b52e-fbf2a2fe44bf\") " Sep 30 10:08:27 crc kubenswrapper[4730]: I0930 10:08:27.515041 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/522b231d-b240-485c-b52e-fbf2a2fe44bf-log-httpd\") pod \"522b231d-b240-485c-b52e-fbf2a2fe44bf\" (UID: \"522b231d-b240-485c-b52e-fbf2a2fe44bf\") " Sep 30 10:08:27 crc kubenswrapper[4730]: I0930 10:08:27.515119 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/522b231d-b240-485c-b52e-fbf2a2fe44bf-combined-ca-bundle\") pod \"522b231d-b240-485c-b52e-fbf2a2fe44bf\" (UID: \"522b231d-b240-485c-b52e-fbf2a2fe44bf\") " Sep 30 10:08:27 crc kubenswrapper[4730]: I0930 10:08:27.515206 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/522b231d-b240-485c-b52e-fbf2a2fe44bf-scripts\") pod \"522b231d-b240-485c-b52e-fbf2a2fe44bf\" (UID: \"522b231d-b240-485c-b52e-fbf2a2fe44bf\") " Sep 30 10:08:27 crc kubenswrapper[4730]: I0930 10:08:27.514494 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/522b231d-b240-485c-b52e-fbf2a2fe44bf-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "522b231d-b240-485c-b52e-fbf2a2fe44bf" (UID: "522b231d-b240-485c-b52e-fbf2a2fe44bf"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 10:08:27 crc kubenswrapper[4730]: I0930 10:08:27.516079 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/522b231d-b240-485c-b52e-fbf2a2fe44bf-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "522b231d-b240-485c-b52e-fbf2a2fe44bf" (UID: "522b231d-b240-485c-b52e-fbf2a2fe44bf"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 10:08:27 crc kubenswrapper[4730]: I0930 10:08:27.534953 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/522b231d-b240-485c-b52e-fbf2a2fe44bf-kube-api-access-sjx9d" (OuterVolumeSpecName: "kube-api-access-sjx9d") pod "522b231d-b240-485c-b52e-fbf2a2fe44bf" (UID: "522b231d-b240-485c-b52e-fbf2a2fe44bf"). InnerVolumeSpecName "kube-api-access-sjx9d". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:08:27 crc kubenswrapper[4730]: I0930 10:08:27.558776 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/522b231d-b240-485c-b52e-fbf2a2fe44bf-scripts" (OuterVolumeSpecName: "scripts") pod "522b231d-b240-485c-b52e-fbf2a2fe44bf" (UID: "522b231d-b240-485c-b52e-fbf2a2fe44bf"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:08:27 crc kubenswrapper[4730]: I0930 10:08:27.617356 4730 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/522b231d-b240-485c-b52e-fbf2a2fe44bf-run-httpd\") on node \"crc\" DevicePath \"\"" Sep 30 10:08:27 crc kubenswrapper[4730]: I0930 10:08:27.617420 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sjx9d\" (UniqueName: \"kubernetes.io/projected/522b231d-b240-485c-b52e-fbf2a2fe44bf-kube-api-access-sjx9d\") on node \"crc\" DevicePath \"\"" Sep 30 10:08:27 crc kubenswrapper[4730]: I0930 10:08:27.617434 4730 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/522b231d-b240-485c-b52e-fbf2a2fe44bf-log-httpd\") on node \"crc\" DevicePath \"\"" Sep 30 10:08:27 crc kubenswrapper[4730]: I0930 10:08:27.617444 4730 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/522b231d-b240-485c-b52e-fbf2a2fe44bf-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 10:08:27 crc kubenswrapper[4730]: I0930 10:08:27.688957 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/522b231d-b240-485c-b52e-fbf2a2fe44bf-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "522b231d-b240-485c-b52e-fbf2a2fe44bf" (UID: "522b231d-b240-485c-b52e-fbf2a2fe44bf"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:08:27 crc kubenswrapper[4730]: I0930 10:08:27.712693 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/522b231d-b240-485c-b52e-fbf2a2fe44bf-config-data" (OuterVolumeSpecName: "config-data") pod "522b231d-b240-485c-b52e-fbf2a2fe44bf" (UID: "522b231d-b240-485c-b52e-fbf2a2fe44bf"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:08:27 crc kubenswrapper[4730]: I0930 10:08:27.719591 4730 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/522b231d-b240-485c-b52e-fbf2a2fe44bf-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 10:08:27 crc kubenswrapper[4730]: I0930 10:08:27.719639 4730 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/522b231d-b240-485c-b52e-fbf2a2fe44bf-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Sep 30 10:08:27 crc kubenswrapper[4730]: I0930 10:08:27.722204 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/522b231d-b240-485c-b52e-fbf2a2fe44bf-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "522b231d-b240-485c-b52e-fbf2a2fe44bf" (UID: "522b231d-b240-485c-b52e-fbf2a2fe44bf"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:08:27 crc kubenswrapper[4730]: I0930 10:08:27.821259 4730 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/522b231d-b240-485c-b52e-fbf2a2fe44bf-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 10:08:27 crc kubenswrapper[4730]: I0930 10:08:27.950026 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"880dcb98-9631-48d9-a6d6-715f3ea9d3d8","Type":"ContainerStarted","Data":"09807107cc4519c4c4a1da148c3bc4853d6ffb40ac19ac9ea6eed19e2ceba9e5"} Sep 30 10:08:27 crc kubenswrapper[4730]: I0930 10:08:27.950097 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Sep 30 10:08:27 crc kubenswrapper[4730]: I0930 10:08:27.950102 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="880dcb98-9631-48d9-a6d6-715f3ea9d3d8" containerName="cinder-api-log" containerID="cri-o://68cbbcabe9c7818668e040d8940320e47942aa440291c156d1049ca79c1074c8" gracePeriod=30 Sep 30 10:08:27 crc kubenswrapper[4730]: I0930 10:08:27.950155 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="880dcb98-9631-48d9-a6d6-715f3ea9d3d8" containerName="cinder-api" containerID="cri-o://09807107cc4519c4c4a1da148c3bc4853d6ffb40ac19ac9ea6eed19e2ceba9e5" gracePeriod=30 Sep 30 10:08:27 crc kubenswrapper[4730]: I0930 10:08:27.957565 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"522b231d-b240-485c-b52e-fbf2a2fe44bf","Type":"ContainerDied","Data":"2060a76e00e7a1cbeb64c65b1f04e7b01d5e6b6f3f6afbf44e712853f14df38b"} Sep 30 10:08:27 crc kubenswrapper[4730]: I0930 10:08:27.957640 4730 scope.go:117] "RemoveContainer" containerID="b08ba61ac80398fe5be6ce46ba02560ddb080e7f76384ef8a1175ddbe761af49" Sep 30 10:08:27 crc kubenswrapper[4730]: I0930 10:08:27.957644 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 30 10:08:27 crc kubenswrapper[4730]: I0930 10:08:27.985600 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=6.985578485 podStartE2EDuration="6.985578485s" podCreationTimestamp="2025-09-30 10:08:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 10:08:27.972658346 +0000 UTC m=+1152.305918349" watchObservedRunningTime="2025-09-30 10:08:27.985578485 +0000 UTC m=+1152.318838478" Sep 30 10:08:28 crc kubenswrapper[4730]: I0930 10:08:28.002033 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 30 10:08:28 crc kubenswrapper[4730]: I0930 10:08:28.006280 4730 scope.go:117] "RemoveContainer" containerID="5c459915c72a30167105b239d3121c064572a723a37eb6cd058065e0a964e5fd" Sep 30 10:08:28 crc kubenswrapper[4730]: I0930 10:08:28.010190 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Sep 30 10:08:28 crc kubenswrapper[4730]: I0930 10:08:28.039536 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Sep 30 10:08:28 crc kubenswrapper[4730]: E0930 10:08:28.041461 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="522b231d-b240-485c-b52e-fbf2a2fe44bf" containerName="proxy-httpd" Sep 30 10:08:28 crc kubenswrapper[4730]: I0930 10:08:28.041490 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="522b231d-b240-485c-b52e-fbf2a2fe44bf" containerName="proxy-httpd" Sep 30 10:08:28 crc kubenswrapper[4730]: E0930 10:08:28.041507 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="522b231d-b240-485c-b52e-fbf2a2fe44bf" containerName="ceilometer-notification-agent" Sep 30 10:08:28 crc kubenswrapper[4730]: I0930 10:08:28.041515 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="522b231d-b240-485c-b52e-fbf2a2fe44bf" containerName="ceilometer-notification-agent" Sep 30 10:08:28 crc kubenswrapper[4730]: E0930 10:08:28.041532 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="522b231d-b240-485c-b52e-fbf2a2fe44bf" containerName="sg-core" Sep 30 10:08:28 crc kubenswrapper[4730]: I0930 10:08:28.041541 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="522b231d-b240-485c-b52e-fbf2a2fe44bf" containerName="sg-core" Sep 30 10:08:28 crc kubenswrapper[4730]: E0930 10:08:28.041558 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="522b231d-b240-485c-b52e-fbf2a2fe44bf" containerName="ceilometer-central-agent" Sep 30 10:08:28 crc kubenswrapper[4730]: I0930 10:08:28.041565 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="522b231d-b240-485c-b52e-fbf2a2fe44bf" containerName="ceilometer-central-agent" Sep 30 10:08:28 crc kubenswrapper[4730]: E0930 10:08:28.041574 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a23804b2-1fe7-4b61-8b65-3d4be3166140" containerName="mariadb-database-create" Sep 30 10:08:28 crc kubenswrapper[4730]: I0930 10:08:28.041582 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="a23804b2-1fe7-4b61-8b65-3d4be3166140" containerName="mariadb-database-create" Sep 30 10:08:28 crc kubenswrapper[4730]: E0930 10:08:28.041621 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fee047bc-cc3d-4044-9045-955a116357df" containerName="mariadb-database-create" Sep 30 10:08:28 crc kubenswrapper[4730]: I0930 10:08:28.041630 4730 state_mem.go:107] 
"Deleted CPUSet assignment" podUID="fee047bc-cc3d-4044-9045-955a116357df" containerName="mariadb-database-create" Sep 30 10:08:28 crc kubenswrapper[4730]: E0930 10:08:28.041667 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6750d3c8-c106-4c5a-a591-a243a9135831" containerName="mariadb-database-create" Sep 30 10:08:28 crc kubenswrapper[4730]: I0930 10:08:28.041675 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="6750d3c8-c106-4c5a-a591-a243a9135831" containerName="mariadb-database-create" Sep 30 10:08:28 crc kubenswrapper[4730]: I0930 10:08:28.041913 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="522b231d-b240-485c-b52e-fbf2a2fe44bf" containerName="sg-core" Sep 30 10:08:28 crc kubenswrapper[4730]: I0930 10:08:28.041930 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="fee047bc-cc3d-4044-9045-955a116357df" containerName="mariadb-database-create" Sep 30 10:08:28 crc kubenswrapper[4730]: I0930 10:08:28.041955 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="522b231d-b240-485c-b52e-fbf2a2fe44bf" containerName="proxy-httpd" Sep 30 10:08:28 crc kubenswrapper[4730]: I0930 10:08:28.041968 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="a23804b2-1fe7-4b61-8b65-3d4be3166140" containerName="mariadb-database-create" Sep 30 10:08:28 crc kubenswrapper[4730]: I0930 10:08:28.041981 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="522b231d-b240-485c-b52e-fbf2a2fe44bf" containerName="ceilometer-notification-agent" Sep 30 10:08:28 crc kubenswrapper[4730]: I0930 10:08:28.041990 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="6750d3c8-c106-4c5a-a591-a243a9135831" containerName="mariadb-database-create" Sep 30 10:08:28 crc kubenswrapper[4730]: I0930 10:08:28.041997 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="522b231d-b240-485c-b52e-fbf2a2fe44bf" containerName="ceilometer-central-agent" Sep 30 10:08:28 crc kubenswrapper[4730]: I0930 10:08:28.042762 4730 scope.go:117] "RemoveContainer" containerID="96d0060e426320737b668a20b50713e0063ccf78a300b5f83f97b8eacddaf3ee" Sep 30 10:08:28 crc kubenswrapper[4730]: I0930 10:08:28.044770 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 30 10:08:28 crc kubenswrapper[4730]: I0930 10:08:28.047827 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Sep 30 10:08:28 crc kubenswrapper[4730]: I0930 10:08:28.048076 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Sep 30 10:08:28 crc kubenswrapper[4730]: I0930 10:08:28.050709 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 30 10:08:28 crc kubenswrapper[4730]: I0930 10:08:28.122923 4730 scope.go:117] "RemoveContainer" containerID="00f330622caf2a6afaff47b297f4c11aa71f7e71c60fec3679b1fbf6df90c535" Sep 30 10:08:28 crc kubenswrapper[4730]: I0930 10:08:28.128844 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d6985e94-3efc-48db-8690-cbf6a02d1047-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"d6985e94-3efc-48db-8690-cbf6a02d1047\") " pod="openstack/ceilometer-0" Sep 30 10:08:28 crc kubenswrapper[4730]: I0930 10:08:28.128929 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d6985e94-3efc-48db-8690-cbf6a02d1047-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"d6985e94-3efc-48db-8690-cbf6a02d1047\") " pod="openstack/ceilometer-0" Sep 30 10:08:28 crc kubenswrapper[4730]: I0930 10:08:28.128971 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d6985e94-3efc-48db-8690-cbf6a02d1047-scripts\") pod \"ceilometer-0\" (UID: \"d6985e94-3efc-48db-8690-cbf6a02d1047\") " pod="openstack/ceilometer-0" Sep 30 10:08:28 crc kubenswrapper[4730]: I0930 10:08:28.129055 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d6985e94-3efc-48db-8690-cbf6a02d1047-log-httpd\") pod \"ceilometer-0\" (UID: \"d6985e94-3efc-48db-8690-cbf6a02d1047\") " pod="openstack/ceilometer-0" Sep 30 10:08:28 crc kubenswrapper[4730]: I0930 10:08:28.129078 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q4vbj\" (UniqueName: \"kubernetes.io/projected/d6985e94-3efc-48db-8690-cbf6a02d1047-kube-api-access-q4vbj\") pod \"ceilometer-0\" (UID: \"d6985e94-3efc-48db-8690-cbf6a02d1047\") " pod="openstack/ceilometer-0" Sep 30 10:08:28 crc kubenswrapper[4730]: I0930 10:08:28.129167 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d6985e94-3efc-48db-8690-cbf6a02d1047-config-data\") pod \"ceilometer-0\" (UID: \"d6985e94-3efc-48db-8690-cbf6a02d1047\") " pod="openstack/ceilometer-0" Sep 30 10:08:28 crc kubenswrapper[4730]: I0930 10:08:28.129197 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d6985e94-3efc-48db-8690-cbf6a02d1047-run-httpd\") pod \"ceilometer-0\" (UID: \"d6985e94-3efc-48db-8690-cbf6a02d1047\") " pod="openstack/ceilometer-0" Sep 30 10:08:28 crc kubenswrapper[4730]: I0930 10:08:28.231269 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/d6985e94-3efc-48db-8690-cbf6a02d1047-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"d6985e94-3efc-48db-8690-cbf6a02d1047\") " pod="openstack/ceilometer-0" Sep 30 10:08:28 crc kubenswrapper[4730]: I0930 10:08:28.231341 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d6985e94-3efc-48db-8690-cbf6a02d1047-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"d6985e94-3efc-48db-8690-cbf6a02d1047\") " pod="openstack/ceilometer-0" Sep 30 10:08:28 crc kubenswrapper[4730]: I0930 10:08:28.231379 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d6985e94-3efc-48db-8690-cbf6a02d1047-scripts\") pod \"ceilometer-0\" (UID: \"d6985e94-3efc-48db-8690-cbf6a02d1047\") " pod="openstack/ceilometer-0" Sep 30 10:08:28 crc kubenswrapper[4730]: I0930 10:08:28.231477 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d6985e94-3efc-48db-8690-cbf6a02d1047-log-httpd\") pod \"ceilometer-0\" (UID: \"d6985e94-3efc-48db-8690-cbf6a02d1047\") " pod="openstack/ceilometer-0" Sep 30 10:08:28 crc kubenswrapper[4730]: I0930 10:08:28.231499 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q4vbj\" (UniqueName: \"kubernetes.io/projected/d6985e94-3efc-48db-8690-cbf6a02d1047-kube-api-access-q4vbj\") pod \"ceilometer-0\" (UID: \"d6985e94-3efc-48db-8690-cbf6a02d1047\") " pod="openstack/ceilometer-0" Sep 30 10:08:28 crc kubenswrapper[4730]: I0930 10:08:28.231567 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d6985e94-3efc-48db-8690-cbf6a02d1047-config-data\") pod \"ceilometer-0\" (UID: \"d6985e94-3efc-48db-8690-cbf6a02d1047\") " pod="openstack/ceilometer-0" Sep 30 10:08:28 crc kubenswrapper[4730]: I0930 10:08:28.231583 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d6985e94-3efc-48db-8690-cbf6a02d1047-run-httpd\") pod \"ceilometer-0\" (UID: \"d6985e94-3efc-48db-8690-cbf6a02d1047\") " pod="openstack/ceilometer-0" Sep 30 10:08:28 crc kubenswrapper[4730]: I0930 10:08:28.232098 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d6985e94-3efc-48db-8690-cbf6a02d1047-run-httpd\") pod \"ceilometer-0\" (UID: \"d6985e94-3efc-48db-8690-cbf6a02d1047\") " pod="openstack/ceilometer-0" Sep 30 10:08:28 crc kubenswrapper[4730]: I0930 10:08:28.232105 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d6985e94-3efc-48db-8690-cbf6a02d1047-log-httpd\") pod \"ceilometer-0\" (UID: \"d6985e94-3efc-48db-8690-cbf6a02d1047\") " pod="openstack/ceilometer-0" Sep 30 10:08:28 crc kubenswrapper[4730]: I0930 10:08:28.238749 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d6985e94-3efc-48db-8690-cbf6a02d1047-config-data\") pod \"ceilometer-0\" (UID: \"d6985e94-3efc-48db-8690-cbf6a02d1047\") " pod="openstack/ceilometer-0" Sep 30 10:08:28 crc kubenswrapper[4730]: I0930 10:08:28.241977 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: 
\"kubernetes.io/secret/d6985e94-3efc-48db-8690-cbf6a02d1047-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"d6985e94-3efc-48db-8690-cbf6a02d1047\") " pod="openstack/ceilometer-0" Sep 30 10:08:28 crc kubenswrapper[4730]: I0930 10:08:28.249831 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d6985e94-3efc-48db-8690-cbf6a02d1047-scripts\") pod \"ceilometer-0\" (UID: \"d6985e94-3efc-48db-8690-cbf6a02d1047\") " pod="openstack/ceilometer-0" Sep 30 10:08:28 crc kubenswrapper[4730]: I0930 10:08:28.250696 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d6985e94-3efc-48db-8690-cbf6a02d1047-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"d6985e94-3efc-48db-8690-cbf6a02d1047\") " pod="openstack/ceilometer-0" Sep 30 10:08:28 crc kubenswrapper[4730]: I0930 10:08:28.252198 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q4vbj\" (UniqueName: \"kubernetes.io/projected/d6985e94-3efc-48db-8690-cbf6a02d1047-kube-api-access-q4vbj\") pod \"ceilometer-0\" (UID: \"d6985e94-3efc-48db-8690-cbf6a02d1047\") " pod="openstack/ceilometer-0" Sep 30 10:08:28 crc kubenswrapper[4730]: I0930 10:08:28.333196 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Sep 30 10:08:28 crc kubenswrapper[4730]: I0930 10:08:28.333417 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="e1641d17-bb96-477c-ae5f-39f8a1da719c" containerName="kube-state-metrics" containerID="cri-o://9606eb7ce234a02673a3c68bea28caa4fc124f2b2960f22147b0ecec38d2efa8" gracePeriod=30 Sep 30 10:08:28 crc kubenswrapper[4730]: I0930 10:08:28.378815 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 10:08:28 crc kubenswrapper[4730]: I0930 10:08:28.416299 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="522b231d-b240-485c-b52e-fbf2a2fe44bf" path="/var/lib/kubelet/pods/522b231d-b240-485c-b52e-fbf2a2fe44bf/volumes" Sep 30 10:08:28 crc kubenswrapper[4730]: I0930 10:08:28.641828 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Sep 30 10:08:28 crc kubenswrapper[4730]: I0930 10:08:28.739414 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9kxwb\" (UniqueName: \"kubernetes.io/projected/880dcb98-9631-48d9-a6d6-715f3ea9d3d8-kube-api-access-9kxwb\") pod \"880dcb98-9631-48d9-a6d6-715f3ea9d3d8\" (UID: \"880dcb98-9631-48d9-a6d6-715f3ea9d3d8\") " Sep 30 10:08:28 crc kubenswrapper[4730]: I0930 10:08:28.739639 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/880dcb98-9631-48d9-a6d6-715f3ea9d3d8-combined-ca-bundle\") pod \"880dcb98-9631-48d9-a6d6-715f3ea9d3d8\" (UID: \"880dcb98-9631-48d9-a6d6-715f3ea9d3d8\") " Sep 30 10:08:28 crc kubenswrapper[4730]: I0930 10:08:28.739719 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/880dcb98-9631-48d9-a6d6-715f3ea9d3d8-logs\") pod \"880dcb98-9631-48d9-a6d6-715f3ea9d3d8\" (UID: \"880dcb98-9631-48d9-a6d6-715f3ea9d3d8\") " Sep 30 10:08:28 crc kubenswrapper[4730]: I0930 10:08:28.739752 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/880dcb98-9631-48d9-a6d6-715f3ea9d3d8-config-data\") pod \"880dcb98-9631-48d9-a6d6-715f3ea9d3d8\" (UID: \"880dcb98-9631-48d9-a6d6-715f3ea9d3d8\") " Sep 30 10:08:28 crc kubenswrapper[4730]: I0930 10:08:28.739821 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/880dcb98-9631-48d9-a6d6-715f3ea9d3d8-etc-machine-id\") pod \"880dcb98-9631-48d9-a6d6-715f3ea9d3d8\" (UID: \"880dcb98-9631-48d9-a6d6-715f3ea9d3d8\") " Sep 30 10:08:28 crc kubenswrapper[4730]: I0930 10:08:28.739883 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/880dcb98-9631-48d9-a6d6-715f3ea9d3d8-config-data-custom\") pod \"880dcb98-9631-48d9-a6d6-715f3ea9d3d8\" (UID: \"880dcb98-9631-48d9-a6d6-715f3ea9d3d8\") " Sep 30 10:08:28 crc kubenswrapper[4730]: I0930 10:08:28.739920 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/880dcb98-9631-48d9-a6d6-715f3ea9d3d8-scripts\") pod \"880dcb98-9631-48d9-a6d6-715f3ea9d3d8\" (UID: \"880dcb98-9631-48d9-a6d6-715f3ea9d3d8\") " Sep 30 10:08:28 crc kubenswrapper[4730]: I0930 10:08:28.742304 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/880dcb98-9631-48d9-a6d6-715f3ea9d3d8-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "880dcb98-9631-48d9-a6d6-715f3ea9d3d8" (UID: "880dcb98-9631-48d9-a6d6-715f3ea9d3d8"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 10:08:28 crc kubenswrapper[4730]: I0930 10:08:28.743009 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/880dcb98-9631-48d9-a6d6-715f3ea9d3d8-logs" (OuterVolumeSpecName: "logs") pod "880dcb98-9631-48d9-a6d6-715f3ea9d3d8" (UID: "880dcb98-9631-48d9-a6d6-715f3ea9d3d8"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 10:08:28 crc kubenswrapper[4730]: I0930 10:08:28.746883 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/880dcb98-9631-48d9-a6d6-715f3ea9d3d8-kube-api-access-9kxwb" (OuterVolumeSpecName: "kube-api-access-9kxwb") pod "880dcb98-9631-48d9-a6d6-715f3ea9d3d8" (UID: "880dcb98-9631-48d9-a6d6-715f3ea9d3d8"). InnerVolumeSpecName "kube-api-access-9kxwb". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:08:28 crc kubenswrapper[4730]: I0930 10:08:28.747885 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/880dcb98-9631-48d9-a6d6-715f3ea9d3d8-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "880dcb98-9631-48d9-a6d6-715f3ea9d3d8" (UID: "880dcb98-9631-48d9-a6d6-715f3ea9d3d8"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:08:28 crc kubenswrapper[4730]: I0930 10:08:28.749654 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/880dcb98-9631-48d9-a6d6-715f3ea9d3d8-scripts" (OuterVolumeSpecName: "scripts") pod "880dcb98-9631-48d9-a6d6-715f3ea9d3d8" (UID: "880dcb98-9631-48d9-a6d6-715f3ea9d3d8"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:08:28 crc kubenswrapper[4730]: I0930 10:08:28.786977 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/880dcb98-9631-48d9-a6d6-715f3ea9d3d8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "880dcb98-9631-48d9-a6d6-715f3ea9d3d8" (UID: "880dcb98-9631-48d9-a6d6-715f3ea9d3d8"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:08:28 crc kubenswrapper[4730]: I0930 10:08:28.843311 4730 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/880dcb98-9631-48d9-a6d6-715f3ea9d3d8-config-data-custom\") on node \"crc\" DevicePath \"\"" Sep 30 10:08:28 crc kubenswrapper[4730]: I0930 10:08:28.843891 4730 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/880dcb98-9631-48d9-a6d6-715f3ea9d3d8-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 10:08:28 crc kubenswrapper[4730]: I0930 10:08:28.843906 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9kxwb\" (UniqueName: \"kubernetes.io/projected/880dcb98-9631-48d9-a6d6-715f3ea9d3d8-kube-api-access-9kxwb\") on node \"crc\" DevicePath \"\"" Sep 30 10:08:28 crc kubenswrapper[4730]: I0930 10:08:28.843916 4730 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/880dcb98-9631-48d9-a6d6-715f3ea9d3d8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 10:08:28 crc kubenswrapper[4730]: I0930 10:08:28.843927 4730 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/880dcb98-9631-48d9-a6d6-715f3ea9d3d8-logs\") on node \"crc\" DevicePath \"\"" Sep 30 10:08:28 crc kubenswrapper[4730]: I0930 10:08:28.843936 4730 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/880dcb98-9631-48d9-a6d6-715f3ea9d3d8-etc-machine-id\") on node \"crc\" DevicePath \"\"" Sep 30 10:08:28 crc kubenswrapper[4730]: I0930 10:08:28.853537 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openstack/ceilometer-0"] Sep 30 10:08:28 crc kubenswrapper[4730]: I0930 10:08:28.856590 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/880dcb98-9631-48d9-a6d6-715f3ea9d3d8-config-data" (OuterVolumeSpecName: "config-data") pod "880dcb98-9631-48d9-a6d6-715f3ea9d3d8" (UID: "880dcb98-9631-48d9-a6d6-715f3ea9d3d8"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:08:28 crc kubenswrapper[4730]: I0930 10:08:28.942640 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Sep 30 10:08:28 crc kubenswrapper[4730]: I0930 10:08:28.945988 4730 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/880dcb98-9631-48d9-a6d6-715f3ea9d3d8-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 10:08:29 crc kubenswrapper[4730]: I0930 10:08:29.047643 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gdpr9\" (UniqueName: \"kubernetes.io/projected/e1641d17-bb96-477c-ae5f-39f8a1da719c-kube-api-access-gdpr9\") pod \"e1641d17-bb96-477c-ae5f-39f8a1da719c\" (UID: \"e1641d17-bb96-477c-ae5f-39f8a1da719c\") " Sep 30 10:08:29 crc kubenswrapper[4730]: I0930 10:08:29.055145 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e1641d17-bb96-477c-ae5f-39f8a1da719c-kube-api-access-gdpr9" (OuterVolumeSpecName: "kube-api-access-gdpr9") pod "e1641d17-bb96-477c-ae5f-39f8a1da719c" (UID: "e1641d17-bb96-477c-ae5f-39f8a1da719c"). InnerVolumeSpecName "kube-api-access-gdpr9". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:08:29 crc kubenswrapper[4730]: I0930 10:08:29.059221 4730 generic.go:334] "Generic (PLEG): container finished" podID="e1641d17-bb96-477c-ae5f-39f8a1da719c" containerID="9606eb7ce234a02673a3c68bea28caa4fc124f2b2960f22147b0ecec38d2efa8" exitCode=2 Sep 30 10:08:29 crc kubenswrapper[4730]: I0930 10:08:29.059319 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"e1641d17-bb96-477c-ae5f-39f8a1da719c","Type":"ContainerDied","Data":"9606eb7ce234a02673a3c68bea28caa4fc124f2b2960f22147b0ecec38d2efa8"} Sep 30 10:08:29 crc kubenswrapper[4730]: I0930 10:08:29.059369 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"e1641d17-bb96-477c-ae5f-39f8a1da719c","Type":"ContainerDied","Data":"f8cf98abb9b07b3646be434e3f653c67360997eb3f71110ee9cc3913fc3f4c7b"} Sep 30 10:08:29 crc kubenswrapper[4730]: I0930 10:08:29.059386 4730 scope.go:117] "RemoveContainer" containerID="9606eb7ce234a02673a3c68bea28caa4fc124f2b2960f22147b0ecec38d2efa8" Sep 30 10:08:29 crc kubenswrapper[4730]: I0930 10:08:29.059496 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Sep 30 10:08:29 crc kubenswrapper[4730]: I0930 10:08:29.064355 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d6985e94-3efc-48db-8690-cbf6a02d1047","Type":"ContainerStarted","Data":"4456140d346dbb26ed91003c7d198cddd960478bb65192a7961753b9ee50a11b"} Sep 30 10:08:29 crc kubenswrapper[4730]: I0930 10:08:29.069863 4730 generic.go:334] "Generic (PLEG): container finished" podID="880dcb98-9631-48d9-a6d6-715f3ea9d3d8" containerID="09807107cc4519c4c4a1da148c3bc4853d6ffb40ac19ac9ea6eed19e2ceba9e5" exitCode=0 Sep 30 10:08:29 crc kubenswrapper[4730]: I0930 10:08:29.069888 4730 generic.go:334] "Generic (PLEG): container finished" podID="880dcb98-9631-48d9-a6d6-715f3ea9d3d8" containerID="68cbbcabe9c7818668e040d8940320e47942aa440291c156d1049ca79c1074c8" exitCode=143 Sep 30 10:08:29 crc kubenswrapper[4730]: I0930 10:08:29.069922 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"880dcb98-9631-48d9-a6d6-715f3ea9d3d8","Type":"ContainerDied","Data":"09807107cc4519c4c4a1da148c3bc4853d6ffb40ac19ac9ea6eed19e2ceba9e5"} Sep 30 10:08:29 crc kubenswrapper[4730]: I0930 10:08:29.069942 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"880dcb98-9631-48d9-a6d6-715f3ea9d3d8","Type":"ContainerDied","Data":"68cbbcabe9c7818668e040d8940320e47942aa440291c156d1049ca79c1074c8"} Sep 30 10:08:29 crc kubenswrapper[4730]: I0930 10:08:29.069952 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"880dcb98-9631-48d9-a6d6-715f3ea9d3d8","Type":"ContainerDied","Data":"b4cadf1c05df5797545320b4e669cae97251e7b2d46334f24794895b4b162206"} Sep 30 10:08:29 crc kubenswrapper[4730]: I0930 10:08:29.070005 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Sep 30 10:08:29 crc kubenswrapper[4730]: I0930 10:08:29.096218 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Sep 30 10:08:29 crc kubenswrapper[4730]: I0930 10:08:29.098550 4730 scope.go:117] "RemoveContainer" containerID="9606eb7ce234a02673a3c68bea28caa4fc124f2b2960f22147b0ecec38d2efa8" Sep 30 10:08:29 crc kubenswrapper[4730]: E0930 10:08:29.099100 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9606eb7ce234a02673a3c68bea28caa4fc124f2b2960f22147b0ecec38d2efa8\": container with ID starting with 9606eb7ce234a02673a3c68bea28caa4fc124f2b2960f22147b0ecec38d2efa8 not found: ID does not exist" containerID="9606eb7ce234a02673a3c68bea28caa4fc124f2b2960f22147b0ecec38d2efa8" Sep 30 10:08:29 crc kubenswrapper[4730]: I0930 10:08:29.099218 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9606eb7ce234a02673a3c68bea28caa4fc124f2b2960f22147b0ecec38d2efa8"} err="failed to get container status \"9606eb7ce234a02673a3c68bea28caa4fc124f2b2960f22147b0ecec38d2efa8\": rpc error: code = NotFound desc = could not find container \"9606eb7ce234a02673a3c68bea28caa4fc124f2b2960f22147b0ecec38d2efa8\": container with ID starting with 9606eb7ce234a02673a3c68bea28caa4fc124f2b2960f22147b0ecec38d2efa8 not found: ID does not exist" Sep 30 10:08:29 crc kubenswrapper[4730]: I0930 10:08:29.099313 4730 scope.go:117] "RemoveContainer" containerID="09807107cc4519c4c4a1da148c3bc4853d6ffb40ac19ac9ea6eed19e2ceba9e5" Sep 30 10:08:29 crc kubenswrapper[4730]: I0930 10:08:29.106313 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"] Sep 30 10:08:29 crc kubenswrapper[4730]: I0930 10:08:29.135679 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Sep 30 10:08:29 crc kubenswrapper[4730]: E0930 10:08:29.136082 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="880dcb98-9631-48d9-a6d6-715f3ea9d3d8" containerName="cinder-api-log" Sep 30 10:08:29 crc kubenswrapper[4730]: I0930 10:08:29.136098 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="880dcb98-9631-48d9-a6d6-715f3ea9d3d8" containerName="cinder-api-log" Sep 30 10:08:29 crc kubenswrapper[4730]: E0930 10:08:29.136112 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="880dcb98-9631-48d9-a6d6-715f3ea9d3d8" containerName="cinder-api" Sep 30 10:08:29 crc kubenswrapper[4730]: I0930 10:08:29.136118 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="880dcb98-9631-48d9-a6d6-715f3ea9d3d8" containerName="cinder-api" Sep 30 10:08:29 crc kubenswrapper[4730]: E0930 10:08:29.136145 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e1641d17-bb96-477c-ae5f-39f8a1da719c" containerName="kube-state-metrics" Sep 30 10:08:29 crc kubenswrapper[4730]: I0930 10:08:29.136160 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="e1641d17-bb96-477c-ae5f-39f8a1da719c" containerName="kube-state-metrics" Sep 30 10:08:29 crc kubenswrapper[4730]: I0930 10:08:29.136353 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="880dcb98-9631-48d9-a6d6-715f3ea9d3d8" containerName="cinder-api" Sep 30 10:08:29 crc kubenswrapper[4730]: I0930 10:08:29.136373 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="e1641d17-bb96-477c-ae5f-39f8a1da719c" containerName="kube-state-metrics" Sep 30 10:08:29 crc 
kubenswrapper[4730]: I0930 10:08:29.136386 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="880dcb98-9631-48d9-a6d6-715f3ea9d3d8" containerName="cinder-api-log" Sep 30 10:08:29 crc kubenswrapper[4730]: I0930 10:08:29.137015 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Sep 30 10:08:29 crc kubenswrapper[4730]: I0930 10:08:29.141893 4730 scope.go:117] "RemoveContainer" containerID="68cbbcabe9c7818668e040d8940320e47942aa440291c156d1049ca79c1074c8" Sep 30 10:08:29 crc kubenswrapper[4730]: I0930 10:08:29.141987 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-kube-state-metrics-svc" Sep 30 10:08:29 crc kubenswrapper[4730]: I0930 10:08:29.142242 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"kube-state-metrics-tls-config" Sep 30 10:08:29 crc kubenswrapper[4730]: I0930 10:08:29.150349 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gdpr9\" (UniqueName: \"kubernetes.io/projected/e1641d17-bb96-477c-ae5f-39f8a1da719c-kube-api-access-gdpr9\") on node \"crc\" DevicePath \"\"" Sep 30 10:08:29 crc kubenswrapper[4730]: I0930 10:08:29.156930 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Sep 30 10:08:29 crc kubenswrapper[4730]: I0930 10:08:29.186683 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Sep 30 10:08:29 crc kubenswrapper[4730]: I0930 10:08:29.188291 4730 scope.go:117] "RemoveContainer" containerID="09807107cc4519c4c4a1da148c3bc4853d6ffb40ac19ac9ea6eed19e2ceba9e5" Sep 30 10:08:29 crc kubenswrapper[4730]: E0930 10:08:29.191178 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"09807107cc4519c4c4a1da148c3bc4853d6ffb40ac19ac9ea6eed19e2ceba9e5\": container with ID starting with 09807107cc4519c4c4a1da148c3bc4853d6ffb40ac19ac9ea6eed19e2ceba9e5 not found: ID does not exist" containerID="09807107cc4519c4c4a1da148c3bc4853d6ffb40ac19ac9ea6eed19e2ceba9e5" Sep 30 10:08:29 crc kubenswrapper[4730]: I0930 10:08:29.191226 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"09807107cc4519c4c4a1da148c3bc4853d6ffb40ac19ac9ea6eed19e2ceba9e5"} err="failed to get container status \"09807107cc4519c4c4a1da148c3bc4853d6ffb40ac19ac9ea6eed19e2ceba9e5\": rpc error: code = NotFound desc = could not find container \"09807107cc4519c4c4a1da148c3bc4853d6ffb40ac19ac9ea6eed19e2ceba9e5\": container with ID starting with 09807107cc4519c4c4a1da148c3bc4853d6ffb40ac19ac9ea6eed19e2ceba9e5 not found: ID does not exist" Sep 30 10:08:29 crc kubenswrapper[4730]: I0930 10:08:29.191254 4730 scope.go:117] "RemoveContainer" containerID="68cbbcabe9c7818668e040d8940320e47942aa440291c156d1049ca79c1074c8" Sep 30 10:08:29 crc kubenswrapper[4730]: E0930 10:08:29.192657 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"68cbbcabe9c7818668e040d8940320e47942aa440291c156d1049ca79c1074c8\": container with ID starting with 68cbbcabe9c7818668e040d8940320e47942aa440291c156d1049ca79c1074c8 not found: ID does not exist" containerID="68cbbcabe9c7818668e040d8940320e47942aa440291c156d1049ca79c1074c8" Sep 30 10:08:29 crc kubenswrapper[4730]: I0930 10:08:29.192793 4730 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"68cbbcabe9c7818668e040d8940320e47942aa440291c156d1049ca79c1074c8"} err="failed to get container status \"68cbbcabe9c7818668e040d8940320e47942aa440291c156d1049ca79c1074c8\": rpc error: code = NotFound desc = could not find container \"68cbbcabe9c7818668e040d8940320e47942aa440291c156d1049ca79c1074c8\": container with ID starting with 68cbbcabe9c7818668e040d8940320e47942aa440291c156d1049ca79c1074c8 not found: ID does not exist" Sep 30 10:08:29 crc kubenswrapper[4730]: I0930 10:08:29.192937 4730 scope.go:117] "RemoveContainer" containerID="09807107cc4519c4c4a1da148c3bc4853d6ffb40ac19ac9ea6eed19e2ceba9e5" Sep 30 10:08:29 crc kubenswrapper[4730]: I0930 10:08:29.193272 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"09807107cc4519c4c4a1da148c3bc4853d6ffb40ac19ac9ea6eed19e2ceba9e5"} err="failed to get container status \"09807107cc4519c4c4a1da148c3bc4853d6ffb40ac19ac9ea6eed19e2ceba9e5\": rpc error: code = NotFound desc = could not find container \"09807107cc4519c4c4a1da148c3bc4853d6ffb40ac19ac9ea6eed19e2ceba9e5\": container with ID starting with 09807107cc4519c4c4a1da148c3bc4853d6ffb40ac19ac9ea6eed19e2ceba9e5 not found: ID does not exist" Sep 30 10:08:29 crc kubenswrapper[4730]: I0930 10:08:29.193387 4730 scope.go:117] "RemoveContainer" containerID="68cbbcabe9c7818668e040d8940320e47942aa440291c156d1049ca79c1074c8" Sep 30 10:08:29 crc kubenswrapper[4730]: I0930 10:08:29.193750 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"68cbbcabe9c7818668e040d8940320e47942aa440291c156d1049ca79c1074c8"} err="failed to get container status \"68cbbcabe9c7818668e040d8940320e47942aa440291c156d1049ca79c1074c8\": rpc error: code = NotFound desc = could not find container \"68cbbcabe9c7818668e040d8940320e47942aa440291c156d1049ca79c1074c8\": container with ID starting with 68cbbcabe9c7818668e040d8940320e47942aa440291c156d1049ca79c1074c8 not found: ID does not exist" Sep 30 10:08:29 crc kubenswrapper[4730]: I0930 10:08:29.205515 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Sep 30 10:08:29 crc kubenswrapper[4730]: I0930 10:08:29.216848 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Sep 30 10:08:29 crc kubenswrapper[4730]: I0930 10:08:29.218668 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Sep 30 10:08:29 crc kubenswrapper[4730]: I0930 10:08:29.222050 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Sep 30 10:08:29 crc kubenswrapper[4730]: I0930 10:08:29.222300 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-public-svc" Sep 30 10:08:29 crc kubenswrapper[4730]: I0930 10:08:29.222924 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-internal-svc" Sep 30 10:08:29 crc kubenswrapper[4730]: I0930 10:08:29.256332 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/5c0ac696-252a-4b32-8086-a6d3a02e945f-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"5c0ac696-252a-4b32-8086-a6d3a02e945f\") " pod="openstack/kube-state-metrics-0" Sep 30 10:08:29 crc kubenswrapper[4730]: I0930 10:08:29.256399 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/5c0ac696-252a-4b32-8086-a6d3a02e945f-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"5c0ac696-252a-4b32-8086-a6d3a02e945f\") " pod="openstack/kube-state-metrics-0" Sep 30 10:08:29 crc kubenswrapper[4730]: I0930 10:08:29.256443 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c0ac696-252a-4b32-8086-a6d3a02e945f-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"5c0ac696-252a-4b32-8086-a6d3a02e945f\") " pod="openstack/kube-state-metrics-0" Sep 30 10:08:29 crc kubenswrapper[4730]: I0930 10:08:29.256884 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-42kzx\" (UniqueName: \"kubernetes.io/projected/5c0ac696-252a-4b32-8086-a6d3a02e945f-kube-api-access-42kzx\") pod \"kube-state-metrics-0\" (UID: \"5c0ac696-252a-4b32-8086-a6d3a02e945f\") " pod="openstack/kube-state-metrics-0" Sep 30 10:08:29 crc kubenswrapper[4730]: I0930 10:08:29.264671 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Sep 30 10:08:29 crc kubenswrapper[4730]: I0930 10:08:29.358435 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/063272c9-558f-47d6-84c0-aa0de64bb715-logs\") pod \"cinder-api-0\" (UID: \"063272c9-558f-47d6-84c0-aa0de64bb715\") " pod="openstack/cinder-api-0" Sep 30 10:08:29 crc kubenswrapper[4730]: I0930 10:08:29.358476 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-szgv8\" (UniqueName: \"kubernetes.io/projected/063272c9-558f-47d6-84c0-aa0de64bb715-kube-api-access-szgv8\") pod \"cinder-api-0\" (UID: \"063272c9-558f-47d6-84c0-aa0de64bb715\") " pod="openstack/cinder-api-0" Sep 30 10:08:29 crc kubenswrapper[4730]: I0930 10:08:29.358496 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/063272c9-558f-47d6-84c0-aa0de64bb715-config-data-custom\") pod \"cinder-api-0\" (UID: \"063272c9-558f-47d6-84c0-aa0de64bb715\") " pod="openstack/cinder-api-0" Sep 30 10:08:29 crc kubenswrapper[4730]: I0930 10:08:29.358557 4730 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/063272c9-558f-47d6-84c0-aa0de64bb715-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"063272c9-558f-47d6-84c0-aa0de64bb715\") " pod="openstack/cinder-api-0" Sep 30 10:08:29 crc kubenswrapper[4730]: I0930 10:08:29.358576 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/063272c9-558f-47d6-84c0-aa0de64bb715-scripts\") pod \"cinder-api-0\" (UID: \"063272c9-558f-47d6-84c0-aa0de64bb715\") " pod="openstack/cinder-api-0" Sep 30 10:08:29 crc kubenswrapper[4730]: I0930 10:08:29.358657 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/063272c9-558f-47d6-84c0-aa0de64bb715-etc-machine-id\") pod \"cinder-api-0\" (UID: \"063272c9-558f-47d6-84c0-aa0de64bb715\") " pod="openstack/cinder-api-0" Sep 30 10:08:29 crc kubenswrapper[4730]: I0930 10:08:29.358763 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/063272c9-558f-47d6-84c0-aa0de64bb715-config-data\") pod \"cinder-api-0\" (UID: \"063272c9-558f-47d6-84c0-aa0de64bb715\") " pod="openstack/cinder-api-0" Sep 30 10:08:29 crc kubenswrapper[4730]: I0930 10:08:29.358863 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/063272c9-558f-47d6-84c0-aa0de64bb715-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"063272c9-558f-47d6-84c0-aa0de64bb715\") " pod="openstack/cinder-api-0" Sep 30 10:08:29 crc kubenswrapper[4730]: I0930 10:08:29.358910 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/063272c9-558f-47d6-84c0-aa0de64bb715-public-tls-certs\") pod \"cinder-api-0\" (UID: \"063272c9-558f-47d6-84c0-aa0de64bb715\") " pod="openstack/cinder-api-0" Sep 30 10:08:29 crc kubenswrapper[4730]: I0930 10:08:29.358976 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-42kzx\" (UniqueName: \"kubernetes.io/projected/5c0ac696-252a-4b32-8086-a6d3a02e945f-kube-api-access-42kzx\") pod \"kube-state-metrics-0\" (UID: \"5c0ac696-252a-4b32-8086-a6d3a02e945f\") " pod="openstack/kube-state-metrics-0" Sep 30 10:08:29 crc kubenswrapper[4730]: I0930 10:08:29.359015 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/5c0ac696-252a-4b32-8086-a6d3a02e945f-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"5c0ac696-252a-4b32-8086-a6d3a02e945f\") " pod="openstack/kube-state-metrics-0" Sep 30 10:08:29 crc kubenswrapper[4730]: I0930 10:08:29.359037 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/5c0ac696-252a-4b32-8086-a6d3a02e945f-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"5c0ac696-252a-4b32-8086-a6d3a02e945f\") " pod="openstack/kube-state-metrics-0" Sep 30 10:08:29 crc kubenswrapper[4730]: I0930 10:08:29.359059 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/5c0ac696-252a-4b32-8086-a6d3a02e945f-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"5c0ac696-252a-4b32-8086-a6d3a02e945f\") " pod="openstack/kube-state-metrics-0" Sep 30 10:08:29 crc kubenswrapper[4730]: I0930 10:08:29.363008 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/5c0ac696-252a-4b32-8086-a6d3a02e945f-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"5c0ac696-252a-4b32-8086-a6d3a02e945f\") " pod="openstack/kube-state-metrics-0" Sep 30 10:08:29 crc kubenswrapper[4730]: I0930 10:08:29.363581 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c0ac696-252a-4b32-8086-a6d3a02e945f-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"5c0ac696-252a-4b32-8086-a6d3a02e945f\") " pod="openstack/kube-state-metrics-0" Sep 30 10:08:29 crc kubenswrapper[4730]: I0930 10:08:29.363882 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/5c0ac696-252a-4b32-8086-a6d3a02e945f-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"5c0ac696-252a-4b32-8086-a6d3a02e945f\") " pod="openstack/kube-state-metrics-0" Sep 30 10:08:29 crc kubenswrapper[4730]: I0930 10:08:29.375155 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-42kzx\" (UniqueName: \"kubernetes.io/projected/5c0ac696-252a-4b32-8086-a6d3a02e945f-kube-api-access-42kzx\") pod \"kube-state-metrics-0\" (UID: \"5c0ac696-252a-4b32-8086-a6d3a02e945f\") " pod="openstack/kube-state-metrics-0" Sep 30 10:08:29 crc kubenswrapper[4730]: I0930 10:08:29.461578 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-szgv8\" (UniqueName: \"kubernetes.io/projected/063272c9-558f-47d6-84c0-aa0de64bb715-kube-api-access-szgv8\") pod \"cinder-api-0\" (UID: \"063272c9-558f-47d6-84c0-aa0de64bb715\") " pod="openstack/cinder-api-0" Sep 30 10:08:29 crc kubenswrapper[4730]: I0930 10:08:29.461641 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/063272c9-558f-47d6-84c0-aa0de64bb715-logs\") pod \"cinder-api-0\" (UID: \"063272c9-558f-47d6-84c0-aa0de64bb715\") " pod="openstack/cinder-api-0" Sep 30 10:08:29 crc kubenswrapper[4730]: I0930 10:08:29.461659 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/063272c9-558f-47d6-84c0-aa0de64bb715-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"063272c9-558f-47d6-84c0-aa0de64bb715\") " pod="openstack/cinder-api-0" Sep 30 10:08:29 crc kubenswrapper[4730]: I0930 10:08:29.461674 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/063272c9-558f-47d6-84c0-aa0de64bb715-config-data-custom\") pod \"cinder-api-0\" (UID: \"063272c9-558f-47d6-84c0-aa0de64bb715\") " pod="openstack/cinder-api-0" Sep 30 10:08:29 crc kubenswrapper[4730]: I0930 10:08:29.461701 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/063272c9-558f-47d6-84c0-aa0de64bb715-scripts\") pod \"cinder-api-0\" (UID: \"063272c9-558f-47d6-84c0-aa0de64bb715\") " pod="openstack/cinder-api-0" Sep 30 10:08:29 crc kubenswrapper[4730]: I0930 
10:08:29.461730 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/063272c9-558f-47d6-84c0-aa0de64bb715-etc-machine-id\") pod \"cinder-api-0\" (UID: \"063272c9-558f-47d6-84c0-aa0de64bb715\") " pod="openstack/cinder-api-0" Sep 30 10:08:29 crc kubenswrapper[4730]: I0930 10:08:29.461771 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/063272c9-558f-47d6-84c0-aa0de64bb715-config-data\") pod \"cinder-api-0\" (UID: \"063272c9-558f-47d6-84c0-aa0de64bb715\") " pod="openstack/cinder-api-0" Sep 30 10:08:29 crc kubenswrapper[4730]: I0930 10:08:29.461828 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/063272c9-558f-47d6-84c0-aa0de64bb715-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"063272c9-558f-47d6-84c0-aa0de64bb715\") " pod="openstack/cinder-api-0" Sep 30 10:08:29 crc kubenswrapper[4730]: I0930 10:08:29.461845 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/063272c9-558f-47d6-84c0-aa0de64bb715-public-tls-certs\") pod \"cinder-api-0\" (UID: \"063272c9-558f-47d6-84c0-aa0de64bb715\") " pod="openstack/cinder-api-0" Sep 30 10:08:29 crc kubenswrapper[4730]: I0930 10:08:29.462218 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/063272c9-558f-47d6-84c0-aa0de64bb715-etc-machine-id\") pod \"cinder-api-0\" (UID: \"063272c9-558f-47d6-84c0-aa0de64bb715\") " pod="openstack/cinder-api-0" Sep 30 10:08:29 crc kubenswrapper[4730]: I0930 10:08:29.462715 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/063272c9-558f-47d6-84c0-aa0de64bb715-logs\") pod \"cinder-api-0\" (UID: \"063272c9-558f-47d6-84c0-aa0de64bb715\") " pod="openstack/cinder-api-0" Sep 30 10:08:29 crc kubenswrapper[4730]: I0930 10:08:29.466858 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/063272c9-558f-47d6-84c0-aa0de64bb715-public-tls-certs\") pod \"cinder-api-0\" (UID: \"063272c9-558f-47d6-84c0-aa0de64bb715\") " pod="openstack/cinder-api-0" Sep 30 10:08:29 crc kubenswrapper[4730]: I0930 10:08:29.468220 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/063272c9-558f-47d6-84c0-aa0de64bb715-scripts\") pod \"cinder-api-0\" (UID: \"063272c9-558f-47d6-84c0-aa0de64bb715\") " pod="openstack/cinder-api-0" Sep 30 10:08:29 crc kubenswrapper[4730]: I0930 10:08:29.468320 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/063272c9-558f-47d6-84c0-aa0de64bb715-config-data-custom\") pod \"cinder-api-0\" (UID: \"063272c9-558f-47d6-84c0-aa0de64bb715\") " pod="openstack/cinder-api-0" Sep 30 10:08:29 crc kubenswrapper[4730]: I0930 10:08:29.468557 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/063272c9-558f-47d6-84c0-aa0de64bb715-config-data\") pod \"cinder-api-0\" (UID: \"063272c9-558f-47d6-84c0-aa0de64bb715\") " pod="openstack/cinder-api-0" Sep 30 10:08:29 crc kubenswrapper[4730]: I0930 10:08:29.469003 4730 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/063272c9-558f-47d6-84c0-aa0de64bb715-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"063272c9-558f-47d6-84c0-aa0de64bb715\") " pod="openstack/cinder-api-0" Sep 30 10:08:29 crc kubenswrapper[4730]: I0930 10:08:29.469164 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Sep 30 10:08:29 crc kubenswrapper[4730]: I0930 10:08:29.470008 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/063272c9-558f-47d6-84c0-aa0de64bb715-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"063272c9-558f-47d6-84c0-aa0de64bb715\") " pod="openstack/cinder-api-0" Sep 30 10:08:29 crc kubenswrapper[4730]: I0930 10:08:29.482315 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-szgv8\" (UniqueName: \"kubernetes.io/projected/063272c9-558f-47d6-84c0-aa0de64bb715-kube-api-access-szgv8\") pod \"cinder-api-0\" (UID: \"063272c9-558f-47d6-84c0-aa0de64bb715\") " pod="openstack/cinder-api-0" Sep 30 10:08:29 crc kubenswrapper[4730]: I0930 10:08:29.540432 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Sep 30 10:08:29 crc kubenswrapper[4730]: I0930 10:08:29.612994 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 30 10:08:29 crc kubenswrapper[4730]: I0930 10:08:29.792421 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-f956cddd4-sbtc4" Sep 30 10:08:30 crc kubenswrapper[4730]: I0930 10:08:30.015423 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Sep 30 10:08:30 crc kubenswrapper[4730]: W0930 10:08:30.019956 4730 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5c0ac696_252a_4b32_8086_a6d3a02e945f.slice/crio-e6628feb669e917396e70120fb0036751362ff7f32d896844c693e3bd768da30 WatchSource:0}: Error finding container e6628feb669e917396e70120fb0036751362ff7f32d896844c693e3bd768da30: Status 404 returned error can't find the container with id e6628feb669e917396e70120fb0036751362ff7f32d896844c693e3bd768da30 Sep 30 10:08:30 crc kubenswrapper[4730]: I0930 10:08:30.089862 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"5c0ac696-252a-4b32-8086-a6d3a02e945f","Type":"ContainerStarted","Data":"e6628feb669e917396e70120fb0036751362ff7f32d896844c693e3bd768da30"} Sep 30 10:08:30 crc kubenswrapper[4730]: I0930 10:08:30.092983 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d6985e94-3efc-48db-8690-cbf6a02d1047","Type":"ContainerStarted","Data":"727470f5d9cb45629f5cba1e64cdd500b9399ac5fcaade575b8c75cfd7ba41c6"} Sep 30 10:08:30 crc kubenswrapper[4730]: I0930 10:08:30.093032 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d6985e94-3efc-48db-8690-cbf6a02d1047","Type":"ContainerStarted","Data":"de93152791519a2619ea6d9b80b5f0f2551bb5d198d8b41743918fbfa31c02b2"} Sep 30 10:08:30 crc kubenswrapper[4730]: I0930 10:08:30.198218 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Sep 30 10:08:30 crc kubenswrapper[4730]: I0930 10:08:30.404899 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="880dcb98-9631-48d9-a6d6-715f3ea9d3d8" 
path="/var/lib/kubelet/pods/880dcb98-9631-48d9-a6d6-715f3ea9d3d8/volumes" Sep 30 10:08:30 crc kubenswrapper[4730]: I0930 10:08:30.405813 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e1641d17-bb96-477c-ae5f-39f8a1da719c" path="/var/lib/kubelet/pods/e1641d17-bb96-477c-ae5f-39f8a1da719c/volumes" Sep 30 10:08:31 crc kubenswrapper[4730]: I0930 10:08:31.110339 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d6985e94-3efc-48db-8690-cbf6a02d1047","Type":"ContainerStarted","Data":"b6293638e08d3026b482ce36b357ae0b0c6579b9ec2f71b17855a4827d6abd59"} Sep 30 10:08:31 crc kubenswrapper[4730]: I0930 10:08:31.112905 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"5c0ac696-252a-4b32-8086-a6d3a02e945f","Type":"ContainerStarted","Data":"e809099f568ce64d7f24e0afcc8034aa790b0463c7d146ed42a883c272a39724"} Sep 30 10:08:31 crc kubenswrapper[4730]: I0930 10:08:31.114262 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"063272c9-558f-47d6-84c0-aa0de64bb715","Type":"ContainerStarted","Data":"314c99cbce0bc4d8ce349104547fc33bebcb58993db0c81c0234a00e259de26e"} Sep 30 10:08:31 crc kubenswrapper[4730]: I0930 10:08:31.114297 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"063272c9-558f-47d6-84c0-aa0de64bb715","Type":"ContainerStarted","Data":"5f34e3dc04b91082eaf3f0dd82b502c64ba2cb6d00ef0deee0df3cd1a4455360"} Sep 30 10:08:32 crc kubenswrapper[4730]: I0930 10:08:32.126307 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"063272c9-558f-47d6-84c0-aa0de64bb715","Type":"ContainerStarted","Data":"d4a876f085e40d81021cb957f660f86fe9ada296def071215af53969a46b9615"} Sep 30 10:08:32 crc kubenswrapper[4730]: I0930 10:08:32.126701 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Sep 30 10:08:32 crc kubenswrapper[4730]: I0930 10:08:32.149887 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=2.661271787 podStartE2EDuration="3.149861705s" podCreationTimestamp="2025-09-30 10:08:29 +0000 UTC" firstStartedPulling="2025-09-30 10:08:30.021790096 +0000 UTC m=+1154.355050089" lastFinishedPulling="2025-09-30 10:08:30.510380014 +0000 UTC m=+1154.843640007" observedRunningTime="2025-09-30 10:08:32.140258591 +0000 UTC m=+1156.473518594" watchObservedRunningTime="2025-09-30 10:08:32.149861705 +0000 UTC m=+1156.483121698" Sep 30 10:08:32 crc kubenswrapper[4730]: I0930 10:08:32.161114 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=3.16109974 podStartE2EDuration="3.16109974s" podCreationTimestamp="2025-09-30 10:08:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 10:08:32.158389201 +0000 UTC m=+1156.491649194" watchObservedRunningTime="2025-09-30 10:08:32.16109974 +0000 UTC m=+1156.494359723" Sep 30 10:08:32 crc kubenswrapper[4730]: I0930 10:08:32.299817 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Sep 30 10:08:32 crc kubenswrapper[4730]: I0930 10:08:32.315817 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5c65c577dc-tzdmg" Sep 30 10:08:32 crc 
kubenswrapper[4730]: I0930 10:08:32.347503 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Sep 30 10:08:32 crc kubenswrapper[4730]: I0930 10:08:32.440207 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-66f5888b75-zf578"] Sep 30 10:08:32 crc kubenswrapper[4730]: I0930 10:08:32.441649 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-66f5888b75-zf578" podUID="1e41c10c-2d54-4df9-bda4-44ae2eba244d" containerName="dnsmasq-dns" containerID="cri-o://5b38ef64b24106bc16f7f756ed4b0b6fa4e16e1ee9911569f8de45ac8222186b" gracePeriod=10 Sep 30 10:08:32 crc kubenswrapper[4730]: I0930 10:08:32.485051 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-9cf79fcd5-4nrfn" Sep 30 10:08:32 crc kubenswrapper[4730]: I0930 10:08:32.561411 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-f956cddd4-sbtc4"] Sep 30 10:08:32 crc kubenswrapper[4730]: I0930 10:08:32.561939 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-f956cddd4-sbtc4" podUID="bc4d7fc4-7053-4985-831a-093ad13a9f45" containerName="neutron-api" containerID="cri-o://d7d350d869da1d66db5b82c41829cc173cb048a4a8c2e2f500f8b1dc0eaecd13" gracePeriod=30 Sep 30 10:08:32 crc kubenswrapper[4730]: I0930 10:08:32.562070 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-f956cddd4-sbtc4" podUID="bc4d7fc4-7053-4985-831a-093ad13a9f45" containerName="neutron-httpd" containerID="cri-o://809eeb547f49f15792831ab80ae1ae4ab3eceb9263a950f4d87aaa5556072bf2" gracePeriod=30 Sep 30 10:08:32 crc kubenswrapper[4730]: I0930 10:08:32.992648 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/watcher-decision-engine-0" Sep 30 10:08:32 crc kubenswrapper[4730]: I0930 10:08:32.993523 4730 scope.go:117] "RemoveContainer" containerID="767915a10d018b40c18d12dc3c1d3b41568181e4b1ea08a89ae112a458629c45" Sep 30 10:08:32 crc kubenswrapper[4730]: E0930 10:08:32.993800 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"watcher-decision-engine\" with CrashLoopBackOff: \"back-off 20s restarting failed container=watcher-decision-engine pod=watcher-decision-engine-0_openstack(a4f9bd21-5f86-4443-87be-eadb5d1c77f9)\"" pod="openstack/watcher-decision-engine-0" podUID="a4f9bd21-5f86-4443-87be-eadb5d1c77f9" Sep 30 10:08:33 crc kubenswrapper[4730]: I0930 10:08:33.144664 4730 generic.go:334] "Generic (PLEG): container finished" podID="bc4d7fc4-7053-4985-831a-093ad13a9f45" containerID="809eeb547f49f15792831ab80ae1ae4ab3eceb9263a950f4d87aaa5556072bf2" exitCode=0 Sep 30 10:08:33 crc kubenswrapper[4730]: I0930 10:08:33.144736 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-f956cddd4-sbtc4" event={"ID":"bc4d7fc4-7053-4985-831a-093ad13a9f45","Type":"ContainerDied","Data":"809eeb547f49f15792831ab80ae1ae4ab3eceb9263a950f4d87aaa5556072bf2"} Sep 30 10:08:33 crc kubenswrapper[4730]: I0930 10:08:33.146990 4730 generic.go:334] "Generic (PLEG): container finished" podID="1e41c10c-2d54-4df9-bda4-44ae2eba244d" containerID="5b38ef64b24106bc16f7f756ed4b0b6fa4e16e1ee9911569f8de45ac8222186b" exitCode=0 Sep 30 10:08:33 crc kubenswrapper[4730]: I0930 10:08:33.147062 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-66f5888b75-zf578" 
event={"ID":"1e41c10c-2d54-4df9-bda4-44ae2eba244d","Type":"ContainerDied","Data":"5b38ef64b24106bc16f7f756ed4b0b6fa4e16e1ee9911569f8de45ac8222186b"} Sep 30 10:08:33 crc kubenswrapper[4730]: I0930 10:08:33.156140 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="7df8ec89-cd12-419e-83a2-ca0d04205f41" containerName="cinder-scheduler" containerID="cri-o://6b24b471a3235e8d01a54393e74a1e7397d0deab97d3c165bd9fdc5d5aa38e4c" gracePeriod=30 Sep 30 10:08:33 crc kubenswrapper[4730]: I0930 10:08:33.156555 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="d6985e94-3efc-48db-8690-cbf6a02d1047" containerName="ceilometer-central-agent" containerID="cri-o://de93152791519a2619ea6d9b80b5f0f2551bb5d198d8b41743918fbfa31c02b2" gracePeriod=30 Sep 30 10:08:33 crc kubenswrapper[4730]: I0930 10:08:33.156641 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d6985e94-3efc-48db-8690-cbf6a02d1047","Type":"ContainerStarted","Data":"6b0ada1daa83e32043ed7d430e4b903beef8dead9cd949f3bf143561e788b37f"} Sep 30 10:08:33 crc kubenswrapper[4730]: I0930 10:08:33.157643 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Sep 30 10:08:33 crc kubenswrapper[4730]: I0930 10:08:33.158292 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="7df8ec89-cd12-419e-83a2-ca0d04205f41" containerName="probe" containerID="cri-o://0b2b9d608b6600aa9fc53e605ebac1990a1916ee85de3e98c9592e5d2e3c6786" gracePeriod=30 Sep 30 10:08:33 crc kubenswrapper[4730]: I0930 10:08:33.158350 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Sep 30 10:08:33 crc kubenswrapper[4730]: I0930 10:08:33.158387 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="d6985e94-3efc-48db-8690-cbf6a02d1047" containerName="proxy-httpd" containerID="cri-o://6b0ada1daa83e32043ed7d430e4b903beef8dead9cd949f3bf143561e788b37f" gracePeriod=30 Sep 30 10:08:33 crc kubenswrapper[4730]: I0930 10:08:33.158447 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="d6985e94-3efc-48db-8690-cbf6a02d1047" containerName="sg-core" containerID="cri-o://b6293638e08d3026b482ce36b357ae0b0c6579b9ec2f71b17855a4827d6abd59" gracePeriod=30 Sep 30 10:08:33 crc kubenswrapper[4730]: I0930 10:08:33.158480 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="d6985e94-3efc-48db-8690-cbf6a02d1047" containerName="ceilometer-notification-agent" containerID="cri-o://727470f5d9cb45629f5cba1e64cdd500b9399ac5fcaade575b8c75cfd7ba41c6" gracePeriod=30 Sep 30 10:08:33 crc kubenswrapper[4730]: I0930 10:08:33.197255 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.272813445 podStartE2EDuration="5.197230405s" podCreationTimestamp="2025-09-30 10:08:28 +0000 UTC" firstStartedPulling="2025-09-30 10:08:28.864849439 +0000 UTC m=+1153.198109432" lastFinishedPulling="2025-09-30 10:08:32.789266399 +0000 UTC m=+1157.122526392" observedRunningTime="2025-09-30 10:08:33.178732805 +0000 UTC m=+1157.511992808" watchObservedRunningTime="2025-09-30 10:08:33.197230405 +0000 UTC m=+1157.530490398" Sep 30 10:08:33 crc kubenswrapper[4730]: I0930 10:08:33.463922 4730 
util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-66f5888b75-zf578" Sep 30 10:08:33 crc kubenswrapper[4730]: I0930 10:08:33.558153 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1e41c10c-2d54-4df9-bda4-44ae2eba244d-config\") pod \"1e41c10c-2d54-4df9-bda4-44ae2eba244d\" (UID: \"1e41c10c-2d54-4df9-bda4-44ae2eba244d\") " Sep 30 10:08:33 crc kubenswrapper[4730]: I0930 10:08:33.558282 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1e41c10c-2d54-4df9-bda4-44ae2eba244d-ovsdbserver-sb\") pod \"1e41c10c-2d54-4df9-bda4-44ae2eba244d\" (UID: \"1e41c10c-2d54-4df9-bda4-44ae2eba244d\") " Sep 30 10:08:33 crc kubenswrapper[4730]: I0930 10:08:33.558378 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1e41c10c-2d54-4df9-bda4-44ae2eba244d-dns-svc\") pod \"1e41c10c-2d54-4df9-bda4-44ae2eba244d\" (UID: \"1e41c10c-2d54-4df9-bda4-44ae2eba244d\") " Sep 30 10:08:33 crc kubenswrapper[4730]: I0930 10:08:33.558430 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qdzvc\" (UniqueName: \"kubernetes.io/projected/1e41c10c-2d54-4df9-bda4-44ae2eba244d-kube-api-access-qdzvc\") pod \"1e41c10c-2d54-4df9-bda4-44ae2eba244d\" (UID: \"1e41c10c-2d54-4df9-bda4-44ae2eba244d\") " Sep 30 10:08:33 crc kubenswrapper[4730]: I0930 10:08:33.558458 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1e41c10c-2d54-4df9-bda4-44ae2eba244d-ovsdbserver-nb\") pod \"1e41c10c-2d54-4df9-bda4-44ae2eba244d\" (UID: \"1e41c10c-2d54-4df9-bda4-44ae2eba244d\") " Sep 30 10:08:33 crc kubenswrapper[4730]: I0930 10:08:33.571050 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1e41c10c-2d54-4df9-bda4-44ae2eba244d-kube-api-access-qdzvc" (OuterVolumeSpecName: "kube-api-access-qdzvc") pod "1e41c10c-2d54-4df9-bda4-44ae2eba244d" (UID: "1e41c10c-2d54-4df9-bda4-44ae2eba244d"). InnerVolumeSpecName "kube-api-access-qdzvc". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:08:33 crc kubenswrapper[4730]: I0930 10:08:33.639445 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1e41c10c-2d54-4df9-bda4-44ae2eba244d-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "1e41c10c-2d54-4df9-bda4-44ae2eba244d" (UID: "1e41c10c-2d54-4df9-bda4-44ae2eba244d"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 10:08:33 crc kubenswrapper[4730]: I0930 10:08:33.646760 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1e41c10c-2d54-4df9-bda4-44ae2eba244d-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "1e41c10c-2d54-4df9-bda4-44ae2eba244d" (UID: "1e41c10c-2d54-4df9-bda4-44ae2eba244d"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 10:08:33 crc kubenswrapper[4730]: I0930 10:08:33.661815 4730 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1e41c10c-2d54-4df9-bda4-44ae2eba244d-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 30 10:08:33 crc kubenswrapper[4730]: I0930 10:08:33.662090 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qdzvc\" (UniqueName: \"kubernetes.io/projected/1e41c10c-2d54-4df9-bda4-44ae2eba244d-kube-api-access-qdzvc\") on node \"crc\" DevicePath \"\"" Sep 30 10:08:33 crc kubenswrapper[4730]: I0930 10:08:33.662229 4730 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1e41c10c-2d54-4df9-bda4-44ae2eba244d-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 30 10:08:33 crc kubenswrapper[4730]: I0930 10:08:33.675169 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1e41c10c-2d54-4df9-bda4-44ae2eba244d-config" (OuterVolumeSpecName: "config") pod "1e41c10c-2d54-4df9-bda4-44ae2eba244d" (UID: "1e41c10c-2d54-4df9-bda4-44ae2eba244d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 10:08:33 crc kubenswrapper[4730]: I0930 10:08:33.682039 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1e41c10c-2d54-4df9-bda4-44ae2eba244d-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "1e41c10c-2d54-4df9-bda4-44ae2eba244d" (UID: "1e41c10c-2d54-4df9-bda4-44ae2eba244d"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 10:08:33 crc kubenswrapper[4730]: I0930 10:08:33.764053 4730 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1e41c10c-2d54-4df9-bda4-44ae2eba244d-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 10:08:33 crc kubenswrapper[4730]: I0930 10:08:33.764091 4730 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1e41c10c-2d54-4df9-bda4-44ae2eba244d-config\") on node \"crc\" DevicePath \"\"" Sep 30 10:08:33 crc kubenswrapper[4730]: I0930 10:08:33.825795 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-6770-account-create-7j85m"] Sep 30 10:08:33 crc kubenswrapper[4730]: E0930 10:08:33.826167 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1e41c10c-2d54-4df9-bda4-44ae2eba244d" containerName="dnsmasq-dns" Sep 30 10:08:33 crc kubenswrapper[4730]: I0930 10:08:33.826183 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="1e41c10c-2d54-4df9-bda4-44ae2eba244d" containerName="dnsmasq-dns" Sep 30 10:08:33 crc kubenswrapper[4730]: E0930 10:08:33.826211 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1e41c10c-2d54-4df9-bda4-44ae2eba244d" containerName="init" Sep 30 10:08:33 crc kubenswrapper[4730]: I0930 10:08:33.826218 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="1e41c10c-2d54-4df9-bda4-44ae2eba244d" containerName="init" Sep 30 10:08:33 crc kubenswrapper[4730]: I0930 10:08:33.826381 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="1e41c10c-2d54-4df9-bda4-44ae2eba244d" containerName="dnsmasq-dns" Sep 30 10:08:33 crc kubenswrapper[4730]: I0930 10:08:33.826964 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-6770-account-create-7j85m" Sep 30 10:08:33 crc kubenswrapper[4730]: I0930 10:08:33.834744 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-6770-account-create-7j85m"] Sep 30 10:08:33 crc kubenswrapper[4730]: I0930 10:08:33.835402 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret" Sep 30 10:08:33 crc kubenswrapper[4730]: I0930 10:08:33.866142 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qwlsb\" (UniqueName: \"kubernetes.io/projected/51b81319-5b51-4d86-8e0f-d5c955ea145b-kube-api-access-qwlsb\") pod \"nova-api-6770-account-create-7j85m\" (UID: \"51b81319-5b51-4d86-8e0f-d5c955ea145b\") " pod="openstack/nova-api-6770-account-create-7j85m" Sep 30 10:08:33 crc kubenswrapper[4730]: I0930 10:08:33.967508 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qwlsb\" (UniqueName: \"kubernetes.io/projected/51b81319-5b51-4d86-8e0f-d5c955ea145b-kube-api-access-qwlsb\") pod \"nova-api-6770-account-create-7j85m\" (UID: \"51b81319-5b51-4d86-8e0f-d5c955ea145b\") " pod="openstack/nova-api-6770-account-create-7j85m" Sep 30 10:08:33 crc kubenswrapper[4730]: I0930 10:08:33.992410 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qwlsb\" (UniqueName: \"kubernetes.io/projected/51b81319-5b51-4d86-8e0f-d5c955ea145b-kube-api-access-qwlsb\") pod \"nova-api-6770-account-create-7j85m\" (UID: \"51b81319-5b51-4d86-8e0f-d5c955ea145b\") " pod="openstack/nova-api-6770-account-create-7j85m" Sep 30 10:08:34 crc kubenswrapper[4730]: I0930 10:08:34.024343 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-beff-account-create-w69vt"] Sep 30 10:08:34 crc kubenswrapper[4730]: I0930 10:08:34.025775 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-beff-account-create-w69vt" Sep 30 10:08:34 crc kubenswrapper[4730]: I0930 10:08:34.028909 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret" Sep 30 10:08:34 crc kubenswrapper[4730]: I0930 10:08:34.033457 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-beff-account-create-w69vt"] Sep 30 10:08:34 crc kubenswrapper[4730]: I0930 10:08:34.069430 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jggc6\" (UniqueName: \"kubernetes.io/projected/3a4ff200-9ad7-4a4b-afe8-4170071efc67-kube-api-access-jggc6\") pod \"nova-cell0-beff-account-create-w69vt\" (UID: \"3a4ff200-9ad7-4a4b-afe8-4170071efc67\") " pod="openstack/nova-cell0-beff-account-create-w69vt" Sep 30 10:08:34 crc kubenswrapper[4730]: I0930 10:08:34.154783 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-6770-account-create-7j85m" Sep 30 10:08:34 crc kubenswrapper[4730]: I0930 10:08:34.167421 4730 generic.go:334] "Generic (PLEG): container finished" podID="7df8ec89-cd12-419e-83a2-ca0d04205f41" containerID="0b2b9d608b6600aa9fc53e605ebac1990a1916ee85de3e98c9592e5d2e3c6786" exitCode=0 Sep 30 10:08:34 crc kubenswrapper[4730]: I0930 10:08:34.167497 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"7df8ec89-cd12-419e-83a2-ca0d04205f41","Type":"ContainerDied","Data":"0b2b9d608b6600aa9fc53e605ebac1990a1916ee85de3e98c9592e5d2e3c6786"} Sep 30 10:08:34 crc kubenswrapper[4730]: I0930 10:08:34.169430 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-66f5888b75-zf578" event={"ID":"1e41c10c-2d54-4df9-bda4-44ae2eba244d","Type":"ContainerDied","Data":"8a27c90ca5297a4ddb443e04f62b6a1557e0ad7b0dc01868fb4426c074743f16"} Sep 30 10:08:34 crc kubenswrapper[4730]: I0930 10:08:34.169460 4730 scope.go:117] "RemoveContainer" containerID="5b38ef64b24106bc16f7f756ed4b0b6fa4e16e1ee9911569f8de45ac8222186b" Sep 30 10:08:34 crc kubenswrapper[4730]: I0930 10:08:34.169580 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-66f5888b75-zf578" Sep 30 10:08:34 crc kubenswrapper[4730]: I0930 10:08:34.177004 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jggc6\" (UniqueName: \"kubernetes.io/projected/3a4ff200-9ad7-4a4b-afe8-4170071efc67-kube-api-access-jggc6\") pod \"nova-cell0-beff-account-create-w69vt\" (UID: \"3a4ff200-9ad7-4a4b-afe8-4170071efc67\") " pod="openstack/nova-cell0-beff-account-create-w69vt" Sep 30 10:08:34 crc kubenswrapper[4730]: I0930 10:08:34.177669 4730 generic.go:334] "Generic (PLEG): container finished" podID="d6985e94-3efc-48db-8690-cbf6a02d1047" containerID="6b0ada1daa83e32043ed7d430e4b903beef8dead9cd949f3bf143561e788b37f" exitCode=0 Sep 30 10:08:34 crc kubenswrapper[4730]: I0930 10:08:34.177699 4730 generic.go:334] "Generic (PLEG): container finished" podID="d6985e94-3efc-48db-8690-cbf6a02d1047" containerID="b6293638e08d3026b482ce36b357ae0b0c6579b9ec2f71b17855a4827d6abd59" exitCode=2 Sep 30 10:08:34 crc kubenswrapper[4730]: I0930 10:08:34.177709 4730 generic.go:334] "Generic (PLEG): container finished" podID="d6985e94-3efc-48db-8690-cbf6a02d1047" containerID="727470f5d9cb45629f5cba1e64cdd500b9399ac5fcaade575b8c75cfd7ba41c6" exitCode=0 Sep 30 10:08:34 crc kubenswrapper[4730]: I0930 10:08:34.177729 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d6985e94-3efc-48db-8690-cbf6a02d1047","Type":"ContainerDied","Data":"6b0ada1daa83e32043ed7d430e4b903beef8dead9cd949f3bf143561e788b37f"} Sep 30 10:08:34 crc kubenswrapper[4730]: I0930 10:08:34.177751 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d6985e94-3efc-48db-8690-cbf6a02d1047","Type":"ContainerDied","Data":"b6293638e08d3026b482ce36b357ae0b0c6579b9ec2f71b17855a4827d6abd59"} Sep 30 10:08:34 crc kubenswrapper[4730]: I0930 10:08:34.177762 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d6985e94-3efc-48db-8690-cbf6a02d1047","Type":"ContainerDied","Data":"727470f5d9cb45629f5cba1e64cdd500b9399ac5fcaade575b8c75cfd7ba41c6"} Sep 30 10:08:34 crc kubenswrapper[4730]: I0930 10:08:34.197993 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jggc6\" 
(UniqueName: \"kubernetes.io/projected/3a4ff200-9ad7-4a4b-afe8-4170071efc67-kube-api-access-jggc6\") pod \"nova-cell0-beff-account-create-w69vt\" (UID: \"3a4ff200-9ad7-4a4b-afe8-4170071efc67\") " pod="openstack/nova-cell0-beff-account-create-w69vt" Sep 30 10:08:34 crc kubenswrapper[4730]: I0930 10:08:34.220548 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-5004-account-create-kzhh4"] Sep 30 10:08:34 crc kubenswrapper[4730]: I0930 10:08:34.221681 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-5004-account-create-kzhh4" Sep 30 10:08:34 crc kubenswrapper[4730]: I0930 10:08:34.223755 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret" Sep 30 10:08:34 crc kubenswrapper[4730]: I0930 10:08:34.233162 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-66f5888b75-zf578"] Sep 30 10:08:34 crc kubenswrapper[4730]: I0930 10:08:34.242125 4730 scope.go:117] "RemoveContainer" containerID="b3c141381dfcb9c52caa9056b6c0033b9394c79be915aa6268a991421e3d2f6b" Sep 30 10:08:34 crc kubenswrapper[4730]: I0930 10:08:34.267363 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-66f5888b75-zf578"] Sep 30 10:08:34 crc kubenswrapper[4730]: I0930 10:08:34.283457 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ww6w4\" (UniqueName: \"kubernetes.io/projected/7070bd97-620f-4ad8-b7c1-044902ae6857-kube-api-access-ww6w4\") pod \"nova-cell1-5004-account-create-kzhh4\" (UID: \"7070bd97-620f-4ad8-b7c1-044902ae6857\") " pod="openstack/nova-cell1-5004-account-create-kzhh4" Sep 30 10:08:34 crc kubenswrapper[4730]: I0930 10:08:34.297400 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-5004-account-create-kzhh4"] Sep 30 10:08:34 crc kubenswrapper[4730]: I0930 10:08:34.372479 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-beff-account-create-w69vt" Sep 30 10:08:34 crc kubenswrapper[4730]: I0930 10:08:34.386440 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ww6w4\" (UniqueName: \"kubernetes.io/projected/7070bd97-620f-4ad8-b7c1-044902ae6857-kube-api-access-ww6w4\") pod \"nova-cell1-5004-account-create-kzhh4\" (UID: \"7070bd97-620f-4ad8-b7c1-044902ae6857\") " pod="openstack/nova-cell1-5004-account-create-kzhh4" Sep 30 10:08:34 crc kubenswrapper[4730]: I0930 10:08:34.401997 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1e41c10c-2d54-4df9-bda4-44ae2eba244d" path="/var/lib/kubelet/pods/1e41c10c-2d54-4df9-bda4-44ae2eba244d/volumes" Sep 30 10:08:34 crc kubenswrapper[4730]: I0930 10:08:34.410104 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ww6w4\" (UniqueName: \"kubernetes.io/projected/7070bd97-620f-4ad8-b7c1-044902ae6857-kube-api-access-ww6w4\") pod \"nova-cell1-5004-account-create-kzhh4\" (UID: \"7070bd97-620f-4ad8-b7c1-044902ae6857\") " pod="openstack/nova-cell1-5004-account-create-kzhh4" Sep 30 10:08:34 crc kubenswrapper[4730]: I0930 10:08:34.556144 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-5004-account-create-kzhh4" Sep 30 10:08:34 crc kubenswrapper[4730]: I0930 10:08:34.717801 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-6770-account-create-7j85m"] Sep 30 10:08:34 crc kubenswrapper[4730]: W0930 10:08:34.718453 4730 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod51b81319_5b51_4d86_8e0f_d5c955ea145b.slice/crio-afebca84ab6c1c1e3d8c0560b9c9208bb9e16eb312c71dc49966ba6dd995c6d4 WatchSource:0}: Error finding container afebca84ab6c1c1e3d8c0560b9c9208bb9e16eb312c71dc49966ba6dd995c6d4: Status 404 returned error can't find the container with id afebca84ab6c1c1e3d8c0560b9c9208bb9e16eb312c71dc49966ba6dd995c6d4 Sep 30 10:08:34 crc kubenswrapper[4730]: I0930 10:08:34.893139 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 10:08:34 crc kubenswrapper[4730]: I0930 10:08:34.973775 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-beff-account-create-w69vt"] Sep 30 10:08:35 crc kubenswrapper[4730]: I0930 10:08:35.002435 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d6985e94-3efc-48db-8690-cbf6a02d1047-combined-ca-bundle\") pod \"d6985e94-3efc-48db-8690-cbf6a02d1047\" (UID: \"d6985e94-3efc-48db-8690-cbf6a02d1047\") " Sep 30 10:08:35 crc kubenswrapper[4730]: I0930 10:08:35.002480 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q4vbj\" (UniqueName: \"kubernetes.io/projected/d6985e94-3efc-48db-8690-cbf6a02d1047-kube-api-access-q4vbj\") pod \"d6985e94-3efc-48db-8690-cbf6a02d1047\" (UID: \"d6985e94-3efc-48db-8690-cbf6a02d1047\") " Sep 30 10:08:35 crc kubenswrapper[4730]: I0930 10:08:35.002512 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d6985e94-3efc-48db-8690-cbf6a02d1047-scripts\") pod \"d6985e94-3efc-48db-8690-cbf6a02d1047\" (UID: \"d6985e94-3efc-48db-8690-cbf6a02d1047\") " Sep 30 10:08:35 crc kubenswrapper[4730]: I0930 10:08:35.002554 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d6985e94-3efc-48db-8690-cbf6a02d1047-sg-core-conf-yaml\") pod \"d6985e94-3efc-48db-8690-cbf6a02d1047\" (UID: \"d6985e94-3efc-48db-8690-cbf6a02d1047\") " Sep 30 10:08:35 crc kubenswrapper[4730]: I0930 10:08:35.002603 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d6985e94-3efc-48db-8690-cbf6a02d1047-run-httpd\") pod \"d6985e94-3efc-48db-8690-cbf6a02d1047\" (UID: \"d6985e94-3efc-48db-8690-cbf6a02d1047\") " Sep 30 10:08:35 crc kubenswrapper[4730]: I0930 10:08:35.002808 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d6985e94-3efc-48db-8690-cbf6a02d1047-log-httpd\") pod \"d6985e94-3efc-48db-8690-cbf6a02d1047\" (UID: \"d6985e94-3efc-48db-8690-cbf6a02d1047\") " Sep 30 10:08:35 crc kubenswrapper[4730]: I0930 10:08:35.002830 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d6985e94-3efc-48db-8690-cbf6a02d1047-config-data\") pod \"d6985e94-3efc-48db-8690-cbf6a02d1047\" (UID: 
\"d6985e94-3efc-48db-8690-cbf6a02d1047\") " Sep 30 10:08:35 crc kubenswrapper[4730]: I0930 10:08:35.009113 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d6985e94-3efc-48db-8690-cbf6a02d1047-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "d6985e94-3efc-48db-8690-cbf6a02d1047" (UID: "d6985e94-3efc-48db-8690-cbf6a02d1047"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 10:08:35 crc kubenswrapper[4730]: I0930 10:08:35.009176 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d6985e94-3efc-48db-8690-cbf6a02d1047-kube-api-access-q4vbj" (OuterVolumeSpecName: "kube-api-access-q4vbj") pod "d6985e94-3efc-48db-8690-cbf6a02d1047" (UID: "d6985e94-3efc-48db-8690-cbf6a02d1047"). InnerVolumeSpecName "kube-api-access-q4vbj". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:08:35 crc kubenswrapper[4730]: I0930 10:08:35.009308 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d6985e94-3efc-48db-8690-cbf6a02d1047-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "d6985e94-3efc-48db-8690-cbf6a02d1047" (UID: "d6985e94-3efc-48db-8690-cbf6a02d1047"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 10:08:35 crc kubenswrapper[4730]: I0930 10:08:35.016317 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d6985e94-3efc-48db-8690-cbf6a02d1047-scripts" (OuterVolumeSpecName: "scripts") pod "d6985e94-3efc-48db-8690-cbf6a02d1047" (UID: "d6985e94-3efc-48db-8690-cbf6a02d1047"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:08:35 crc kubenswrapper[4730]: I0930 10:08:35.035162 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d6985e94-3efc-48db-8690-cbf6a02d1047-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "d6985e94-3efc-48db-8690-cbf6a02d1047" (UID: "d6985e94-3efc-48db-8690-cbf6a02d1047"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:08:35 crc kubenswrapper[4730]: I0930 10:08:35.100727 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-5004-account-create-kzhh4"] Sep 30 10:08:35 crc kubenswrapper[4730]: I0930 10:08:35.104562 4730 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d6985e94-3efc-48db-8690-cbf6a02d1047-log-httpd\") on node \"crc\" DevicePath \"\"" Sep 30 10:08:35 crc kubenswrapper[4730]: I0930 10:08:35.104617 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q4vbj\" (UniqueName: \"kubernetes.io/projected/d6985e94-3efc-48db-8690-cbf6a02d1047-kube-api-access-q4vbj\") on node \"crc\" DevicePath \"\"" Sep 30 10:08:35 crc kubenswrapper[4730]: I0930 10:08:35.104654 4730 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d6985e94-3efc-48db-8690-cbf6a02d1047-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 10:08:35 crc kubenswrapper[4730]: I0930 10:08:35.104666 4730 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d6985e94-3efc-48db-8690-cbf6a02d1047-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Sep 30 10:08:35 crc kubenswrapper[4730]: I0930 10:08:35.104677 4730 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d6985e94-3efc-48db-8690-cbf6a02d1047-run-httpd\") on node \"crc\" DevicePath \"\"" Sep 30 10:08:35 crc kubenswrapper[4730]: I0930 10:08:35.146942 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d6985e94-3efc-48db-8690-cbf6a02d1047-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d6985e94-3efc-48db-8690-cbf6a02d1047" (UID: "d6985e94-3efc-48db-8690-cbf6a02d1047"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:08:35 crc kubenswrapper[4730]: I0930 10:08:35.159411 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d6985e94-3efc-48db-8690-cbf6a02d1047-config-data" (OuterVolumeSpecName: "config-data") pod "d6985e94-3efc-48db-8690-cbf6a02d1047" (UID: "d6985e94-3efc-48db-8690-cbf6a02d1047"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:08:35 crc kubenswrapper[4730]: I0930 10:08:35.194261 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-5004-account-create-kzhh4" event={"ID":"7070bd97-620f-4ad8-b7c1-044902ae6857","Type":"ContainerStarted","Data":"878cd673eacc91a733120a98aa617a9c978a69bd3c1808e58ea12d6969300336"} Sep 30 10:08:35 crc kubenswrapper[4730]: I0930 10:08:35.199928 4730 generic.go:334] "Generic (PLEG): container finished" podID="d6985e94-3efc-48db-8690-cbf6a02d1047" containerID="de93152791519a2619ea6d9b80b5f0f2551bb5d198d8b41743918fbfa31c02b2" exitCode=0 Sep 30 10:08:35 crc kubenswrapper[4730]: I0930 10:08:35.199986 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d6985e94-3efc-48db-8690-cbf6a02d1047","Type":"ContainerDied","Data":"de93152791519a2619ea6d9b80b5f0f2551bb5d198d8b41743918fbfa31c02b2"} Sep 30 10:08:35 crc kubenswrapper[4730]: I0930 10:08:35.200006 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d6985e94-3efc-48db-8690-cbf6a02d1047","Type":"ContainerDied","Data":"4456140d346dbb26ed91003c7d198cddd960478bb65192a7961753b9ee50a11b"} Sep 30 10:08:35 crc kubenswrapper[4730]: I0930 10:08:35.200021 4730 scope.go:117] "RemoveContainer" containerID="6b0ada1daa83e32043ed7d430e4b903beef8dead9cd949f3bf143561e788b37f" Sep 30 10:08:35 crc kubenswrapper[4730]: I0930 10:08:35.200114 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 10:08:35 crc kubenswrapper[4730]: I0930 10:08:35.206948 4730 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d6985e94-3efc-48db-8690-cbf6a02d1047-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 10:08:35 crc kubenswrapper[4730]: I0930 10:08:35.207232 4730 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d6985e94-3efc-48db-8690-cbf6a02d1047-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 10:08:35 crc kubenswrapper[4730]: I0930 10:08:35.210536 4730 generic.go:334] "Generic (PLEG): container finished" podID="51b81319-5b51-4d86-8e0f-d5c955ea145b" containerID="77ac1d3684b99ad78e5a7335a7932053b3d8c3347dc6bc7967706174f0476885" exitCode=0 Sep 30 10:08:35 crc kubenswrapper[4730]: I0930 10:08:35.210663 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-6770-account-create-7j85m" event={"ID":"51b81319-5b51-4d86-8e0f-d5c955ea145b","Type":"ContainerDied","Data":"77ac1d3684b99ad78e5a7335a7932053b3d8c3347dc6bc7967706174f0476885"} Sep 30 10:08:35 crc kubenswrapper[4730]: I0930 10:08:35.210700 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-6770-account-create-7j85m" event={"ID":"51b81319-5b51-4d86-8e0f-d5c955ea145b","Type":"ContainerStarted","Data":"afebca84ab6c1c1e3d8c0560b9c9208bb9e16eb312c71dc49966ba6dd995c6d4"} Sep 30 10:08:35 crc kubenswrapper[4730]: I0930 10:08:35.215744 4730 generic.go:334] "Generic (PLEG): container finished" podID="bc4d7fc4-7053-4985-831a-093ad13a9f45" containerID="d7d350d869da1d66db5b82c41829cc173cb048a4a8c2e2f500f8b1dc0eaecd13" exitCode=0 Sep 30 10:08:35 crc kubenswrapper[4730]: I0930 10:08:35.215813 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-f956cddd4-sbtc4" 
event={"ID":"bc4d7fc4-7053-4985-831a-093ad13a9f45","Type":"ContainerDied","Data":"d7d350d869da1d66db5b82c41829cc173cb048a4a8c2e2f500f8b1dc0eaecd13"} Sep 30 10:08:35 crc kubenswrapper[4730]: I0930 10:08:35.218758 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-beff-account-create-w69vt" event={"ID":"3a4ff200-9ad7-4a4b-afe8-4170071efc67","Type":"ContainerStarted","Data":"fac94bc3f86d8140ae61d87a6b36ac3c2a1b3b9138f78ddf9ea350a486d9b1bc"} Sep 30 10:08:35 crc kubenswrapper[4730]: I0930 10:08:35.218790 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-beff-account-create-w69vt" event={"ID":"3a4ff200-9ad7-4a4b-afe8-4170071efc67","Type":"ContainerStarted","Data":"4a0619d0a30194e3e5260b4c427ffae149a91731855ad50f13743a1902cb0fd3"} Sep 30 10:08:35 crc kubenswrapper[4730]: I0930 10:08:35.244887 4730 scope.go:117] "RemoveContainer" containerID="b6293638e08d3026b482ce36b357ae0b0c6579b9ec2f71b17855a4827d6abd59" Sep 30 10:08:35 crc kubenswrapper[4730]: I0930 10:08:35.258670 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 30 10:08:35 crc kubenswrapper[4730]: I0930 10:08:35.282890 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Sep 30 10:08:35 crc kubenswrapper[4730]: I0930 10:08:35.283569 4730 scope.go:117] "RemoveContainer" containerID="727470f5d9cb45629f5cba1e64cdd500b9399ac5fcaade575b8c75cfd7ba41c6" Sep 30 10:08:35 crc kubenswrapper[4730]: I0930 10:08:35.297806 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-beff-account-create-w69vt" podStartSLOduration=2.297781885 podStartE2EDuration="2.297781885s" podCreationTimestamp="2025-09-30 10:08:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 10:08:35.259971193 +0000 UTC m=+1159.593231186" watchObservedRunningTime="2025-09-30 10:08:35.297781885 +0000 UTC m=+1159.631041868" Sep 30 10:08:35 crc kubenswrapper[4730]: I0930 10:08:35.327861 4730 scope.go:117] "RemoveContainer" containerID="de93152791519a2619ea6d9b80b5f0f2551bb5d198d8b41743918fbfa31c02b2" Sep 30 10:08:35 crc kubenswrapper[4730]: I0930 10:08:35.328427 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Sep 30 10:08:35 crc kubenswrapper[4730]: E0930 10:08:35.328836 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d6985e94-3efc-48db-8690-cbf6a02d1047" containerName="ceilometer-central-agent" Sep 30 10:08:35 crc kubenswrapper[4730]: I0930 10:08:35.328847 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="d6985e94-3efc-48db-8690-cbf6a02d1047" containerName="ceilometer-central-agent" Sep 30 10:08:35 crc kubenswrapper[4730]: E0930 10:08:35.328861 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d6985e94-3efc-48db-8690-cbf6a02d1047" containerName="proxy-httpd" Sep 30 10:08:35 crc kubenswrapper[4730]: I0930 10:08:35.328868 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="d6985e94-3efc-48db-8690-cbf6a02d1047" containerName="proxy-httpd" Sep 30 10:08:35 crc kubenswrapper[4730]: E0930 10:08:35.328889 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d6985e94-3efc-48db-8690-cbf6a02d1047" containerName="ceilometer-notification-agent" Sep 30 10:08:35 crc kubenswrapper[4730]: I0930 10:08:35.328895 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="d6985e94-3efc-48db-8690-cbf6a02d1047" 
containerName="ceilometer-notification-agent" Sep 30 10:08:35 crc kubenswrapper[4730]: E0930 10:08:35.328917 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d6985e94-3efc-48db-8690-cbf6a02d1047" containerName="sg-core" Sep 30 10:08:35 crc kubenswrapper[4730]: I0930 10:08:35.328923 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="d6985e94-3efc-48db-8690-cbf6a02d1047" containerName="sg-core" Sep 30 10:08:35 crc kubenswrapper[4730]: I0930 10:08:35.329132 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="d6985e94-3efc-48db-8690-cbf6a02d1047" containerName="ceilometer-central-agent" Sep 30 10:08:35 crc kubenswrapper[4730]: I0930 10:08:35.329145 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="d6985e94-3efc-48db-8690-cbf6a02d1047" containerName="ceilometer-notification-agent" Sep 30 10:08:35 crc kubenswrapper[4730]: I0930 10:08:35.329160 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="d6985e94-3efc-48db-8690-cbf6a02d1047" containerName="proxy-httpd" Sep 30 10:08:35 crc kubenswrapper[4730]: I0930 10:08:35.329170 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="d6985e94-3efc-48db-8690-cbf6a02d1047" containerName="sg-core" Sep 30 10:08:35 crc kubenswrapper[4730]: I0930 10:08:35.330792 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 10:08:35 crc kubenswrapper[4730]: I0930 10:08:35.333562 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Sep 30 10:08:35 crc kubenswrapper[4730]: I0930 10:08:35.335485 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Sep 30 10:08:35 crc kubenswrapper[4730]: I0930 10:08:35.335646 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Sep 30 10:08:35 crc kubenswrapper[4730]: I0930 10:08:35.358978 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 30 10:08:35 crc kubenswrapper[4730]: I0930 10:08:35.366997 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-f956cddd4-sbtc4" Sep 30 10:08:35 crc kubenswrapper[4730]: I0930 10:08:35.384661 4730 scope.go:117] "RemoveContainer" containerID="6b0ada1daa83e32043ed7d430e4b903beef8dead9cd949f3bf143561e788b37f" Sep 30 10:08:35 crc kubenswrapper[4730]: E0930 10:08:35.385129 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6b0ada1daa83e32043ed7d430e4b903beef8dead9cd949f3bf143561e788b37f\": container with ID starting with 6b0ada1daa83e32043ed7d430e4b903beef8dead9cd949f3bf143561e788b37f not found: ID does not exist" containerID="6b0ada1daa83e32043ed7d430e4b903beef8dead9cd949f3bf143561e788b37f" Sep 30 10:08:35 crc kubenswrapper[4730]: I0930 10:08:35.385172 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6b0ada1daa83e32043ed7d430e4b903beef8dead9cd949f3bf143561e788b37f"} err="failed to get container status \"6b0ada1daa83e32043ed7d430e4b903beef8dead9cd949f3bf143561e788b37f\": rpc error: code = NotFound desc = could not find container \"6b0ada1daa83e32043ed7d430e4b903beef8dead9cd949f3bf143561e788b37f\": container with ID starting with 6b0ada1daa83e32043ed7d430e4b903beef8dead9cd949f3bf143561e788b37f not found: ID does not exist" Sep 30 10:08:35 crc kubenswrapper[4730]: I0930 10:08:35.385201 4730 scope.go:117] "RemoveContainer" containerID="b6293638e08d3026b482ce36b357ae0b0c6579b9ec2f71b17855a4827d6abd59" Sep 30 10:08:35 crc kubenswrapper[4730]: E0930 10:08:35.385752 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b6293638e08d3026b482ce36b357ae0b0c6579b9ec2f71b17855a4827d6abd59\": container with ID starting with b6293638e08d3026b482ce36b357ae0b0c6579b9ec2f71b17855a4827d6abd59 not found: ID does not exist" containerID="b6293638e08d3026b482ce36b357ae0b0c6579b9ec2f71b17855a4827d6abd59" Sep 30 10:08:35 crc kubenswrapper[4730]: I0930 10:08:35.385789 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b6293638e08d3026b482ce36b357ae0b0c6579b9ec2f71b17855a4827d6abd59"} err="failed to get container status \"b6293638e08d3026b482ce36b357ae0b0c6579b9ec2f71b17855a4827d6abd59\": rpc error: code = NotFound desc = could not find container \"b6293638e08d3026b482ce36b357ae0b0c6579b9ec2f71b17855a4827d6abd59\": container with ID starting with b6293638e08d3026b482ce36b357ae0b0c6579b9ec2f71b17855a4827d6abd59 not found: ID does not exist" Sep 30 10:08:35 crc kubenswrapper[4730]: I0930 10:08:35.385815 4730 scope.go:117] "RemoveContainer" containerID="727470f5d9cb45629f5cba1e64cdd500b9399ac5fcaade575b8c75cfd7ba41c6" Sep 30 10:08:35 crc kubenswrapper[4730]: E0930 10:08:35.386049 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"727470f5d9cb45629f5cba1e64cdd500b9399ac5fcaade575b8c75cfd7ba41c6\": container with ID starting with 727470f5d9cb45629f5cba1e64cdd500b9399ac5fcaade575b8c75cfd7ba41c6 not found: ID does not exist" containerID="727470f5d9cb45629f5cba1e64cdd500b9399ac5fcaade575b8c75cfd7ba41c6" Sep 30 10:08:35 crc kubenswrapper[4730]: I0930 10:08:35.386076 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"727470f5d9cb45629f5cba1e64cdd500b9399ac5fcaade575b8c75cfd7ba41c6"} err="failed to get container status \"727470f5d9cb45629f5cba1e64cdd500b9399ac5fcaade575b8c75cfd7ba41c6\": rpc error: code = NotFound desc = 
could not find container \"727470f5d9cb45629f5cba1e64cdd500b9399ac5fcaade575b8c75cfd7ba41c6\": container with ID starting with 727470f5d9cb45629f5cba1e64cdd500b9399ac5fcaade575b8c75cfd7ba41c6 not found: ID does not exist" Sep 30 10:08:35 crc kubenswrapper[4730]: I0930 10:08:35.386089 4730 scope.go:117] "RemoveContainer" containerID="de93152791519a2619ea6d9b80b5f0f2551bb5d198d8b41743918fbfa31c02b2" Sep 30 10:08:35 crc kubenswrapper[4730]: E0930 10:08:35.386262 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"de93152791519a2619ea6d9b80b5f0f2551bb5d198d8b41743918fbfa31c02b2\": container with ID starting with de93152791519a2619ea6d9b80b5f0f2551bb5d198d8b41743918fbfa31c02b2 not found: ID does not exist" containerID="de93152791519a2619ea6d9b80b5f0f2551bb5d198d8b41743918fbfa31c02b2" Sep 30 10:08:35 crc kubenswrapper[4730]: I0930 10:08:35.386281 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"de93152791519a2619ea6d9b80b5f0f2551bb5d198d8b41743918fbfa31c02b2"} err="failed to get container status \"de93152791519a2619ea6d9b80b5f0f2551bb5d198d8b41743918fbfa31c02b2\": rpc error: code = NotFound desc = could not find container \"de93152791519a2619ea6d9b80b5f0f2551bb5d198d8b41743918fbfa31c02b2\": container with ID starting with de93152791519a2619ea6d9b80b5f0f2551bb5d198d8b41743918fbfa31c02b2 not found: ID does not exist" Sep 30 10:08:35 crc kubenswrapper[4730]: I0930 10:08:35.414325 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bc4d7fc4-7053-4985-831a-093ad13a9f45-combined-ca-bundle\") pod \"bc4d7fc4-7053-4985-831a-093ad13a9f45\" (UID: \"bc4d7fc4-7053-4985-831a-093ad13a9f45\") " Sep 30 10:08:35 crc kubenswrapper[4730]: I0930 10:08:35.414677 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z9qx7\" (UniqueName: \"kubernetes.io/projected/bc4d7fc4-7053-4985-831a-093ad13a9f45-kube-api-access-z9qx7\") pod \"bc4d7fc4-7053-4985-831a-093ad13a9f45\" (UID: \"bc4d7fc4-7053-4985-831a-093ad13a9f45\") " Sep 30 10:08:35 crc kubenswrapper[4730]: I0930 10:08:35.414795 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/bc4d7fc4-7053-4985-831a-093ad13a9f45-httpd-config\") pod \"bc4d7fc4-7053-4985-831a-093ad13a9f45\" (UID: \"bc4d7fc4-7053-4985-831a-093ad13a9f45\") " Sep 30 10:08:35 crc kubenswrapper[4730]: I0930 10:08:35.414857 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/bc4d7fc4-7053-4985-831a-093ad13a9f45-ovndb-tls-certs\") pod \"bc4d7fc4-7053-4985-831a-093ad13a9f45\" (UID: \"bc4d7fc4-7053-4985-831a-093ad13a9f45\") " Sep 30 10:08:35 crc kubenswrapper[4730]: I0930 10:08:35.414888 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/bc4d7fc4-7053-4985-831a-093ad13a9f45-config\") pod \"bc4d7fc4-7053-4985-831a-093ad13a9f45\" (UID: \"bc4d7fc4-7053-4985-831a-093ad13a9f45\") " Sep 30 10:08:35 crc kubenswrapper[4730]: I0930 10:08:35.415167 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f5ac3d92-1949-4134-9a3a-5af8b1f7e914-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: 
\"f5ac3d92-1949-4134-9a3a-5af8b1f7e914\") " pod="openstack/ceilometer-0" Sep 30 10:08:35 crc kubenswrapper[4730]: I0930 10:08:35.415208 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f5ac3d92-1949-4134-9a3a-5af8b1f7e914-log-httpd\") pod \"ceilometer-0\" (UID: \"f5ac3d92-1949-4134-9a3a-5af8b1f7e914\") " pod="openstack/ceilometer-0" Sep 30 10:08:35 crc kubenswrapper[4730]: I0930 10:08:35.415259 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5ac3d92-1949-4134-9a3a-5af8b1f7e914-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f5ac3d92-1949-4134-9a3a-5af8b1f7e914\") " pod="openstack/ceilometer-0" Sep 30 10:08:35 crc kubenswrapper[4730]: I0930 10:08:35.415303 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/f5ac3d92-1949-4134-9a3a-5af8b1f7e914-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"f5ac3d92-1949-4134-9a3a-5af8b1f7e914\") " pod="openstack/ceilometer-0" Sep 30 10:08:35 crc kubenswrapper[4730]: I0930 10:08:35.415367 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f5ac3d92-1949-4134-9a3a-5af8b1f7e914-scripts\") pod \"ceilometer-0\" (UID: \"f5ac3d92-1949-4134-9a3a-5af8b1f7e914\") " pod="openstack/ceilometer-0" Sep 30 10:08:35 crc kubenswrapper[4730]: I0930 10:08:35.415386 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f5ac3d92-1949-4134-9a3a-5af8b1f7e914-run-httpd\") pod \"ceilometer-0\" (UID: \"f5ac3d92-1949-4134-9a3a-5af8b1f7e914\") " pod="openstack/ceilometer-0" Sep 30 10:08:35 crc kubenswrapper[4730]: I0930 10:08:35.415415 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f5ac3d92-1949-4134-9a3a-5af8b1f7e914-config-data\") pod \"ceilometer-0\" (UID: \"f5ac3d92-1949-4134-9a3a-5af8b1f7e914\") " pod="openstack/ceilometer-0" Sep 30 10:08:35 crc kubenswrapper[4730]: I0930 10:08:35.415453 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s6ljw\" (UniqueName: \"kubernetes.io/projected/f5ac3d92-1949-4134-9a3a-5af8b1f7e914-kube-api-access-s6ljw\") pod \"ceilometer-0\" (UID: \"f5ac3d92-1949-4134-9a3a-5af8b1f7e914\") " pod="openstack/ceilometer-0" Sep 30 10:08:35 crc kubenswrapper[4730]: I0930 10:08:35.421437 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc4d7fc4-7053-4985-831a-093ad13a9f45-kube-api-access-z9qx7" (OuterVolumeSpecName: "kube-api-access-z9qx7") pod "bc4d7fc4-7053-4985-831a-093ad13a9f45" (UID: "bc4d7fc4-7053-4985-831a-093ad13a9f45"). InnerVolumeSpecName "kube-api-access-z9qx7". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:08:35 crc kubenswrapper[4730]: I0930 10:08:35.421747 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc4d7fc4-7053-4985-831a-093ad13a9f45-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "bc4d7fc4-7053-4985-831a-093ad13a9f45" (UID: "bc4d7fc4-7053-4985-831a-093ad13a9f45"). InnerVolumeSpecName "httpd-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:08:35 crc kubenswrapper[4730]: I0930 10:08:35.491406 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc4d7fc4-7053-4985-831a-093ad13a9f45-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "bc4d7fc4-7053-4985-831a-093ad13a9f45" (UID: "bc4d7fc4-7053-4985-831a-093ad13a9f45"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:08:35 crc kubenswrapper[4730]: I0930 10:08:35.514810 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc4d7fc4-7053-4985-831a-093ad13a9f45-config" (OuterVolumeSpecName: "config") pod "bc4d7fc4-7053-4985-831a-093ad13a9f45" (UID: "bc4d7fc4-7053-4985-831a-093ad13a9f45"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:08:35 crc kubenswrapper[4730]: I0930 10:08:35.517465 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5ac3d92-1949-4134-9a3a-5af8b1f7e914-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f5ac3d92-1949-4134-9a3a-5af8b1f7e914\") " pod="openstack/ceilometer-0" Sep 30 10:08:35 crc kubenswrapper[4730]: I0930 10:08:35.517532 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/f5ac3d92-1949-4134-9a3a-5af8b1f7e914-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"f5ac3d92-1949-4134-9a3a-5af8b1f7e914\") " pod="openstack/ceilometer-0" Sep 30 10:08:35 crc kubenswrapper[4730]: I0930 10:08:35.517602 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f5ac3d92-1949-4134-9a3a-5af8b1f7e914-scripts\") pod \"ceilometer-0\" (UID: \"f5ac3d92-1949-4134-9a3a-5af8b1f7e914\") " pod="openstack/ceilometer-0" Sep 30 10:08:35 crc kubenswrapper[4730]: I0930 10:08:35.517637 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f5ac3d92-1949-4134-9a3a-5af8b1f7e914-run-httpd\") pod \"ceilometer-0\" (UID: \"f5ac3d92-1949-4134-9a3a-5af8b1f7e914\") " pod="openstack/ceilometer-0" Sep 30 10:08:35 crc kubenswrapper[4730]: I0930 10:08:35.517660 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f5ac3d92-1949-4134-9a3a-5af8b1f7e914-config-data\") pod \"ceilometer-0\" (UID: \"f5ac3d92-1949-4134-9a3a-5af8b1f7e914\") " pod="openstack/ceilometer-0" Sep 30 10:08:35 crc kubenswrapper[4730]: I0930 10:08:35.517864 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s6ljw\" (UniqueName: \"kubernetes.io/projected/f5ac3d92-1949-4134-9a3a-5af8b1f7e914-kube-api-access-s6ljw\") pod \"ceilometer-0\" (UID: \"f5ac3d92-1949-4134-9a3a-5af8b1f7e914\") " pod="openstack/ceilometer-0" Sep 30 10:08:35 crc kubenswrapper[4730]: I0930 10:08:35.517966 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f5ac3d92-1949-4134-9a3a-5af8b1f7e914-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f5ac3d92-1949-4134-9a3a-5af8b1f7e914\") " pod="openstack/ceilometer-0" Sep 30 10:08:35 crc kubenswrapper[4730]: I0930 10:08:35.517987 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f5ac3d92-1949-4134-9a3a-5af8b1f7e914-log-httpd\") pod \"ceilometer-0\" (UID: \"f5ac3d92-1949-4134-9a3a-5af8b1f7e914\") " pod="openstack/ceilometer-0" Sep 30 10:08:35 crc kubenswrapper[4730]: I0930 10:08:35.518041 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z9qx7\" (UniqueName: \"kubernetes.io/projected/bc4d7fc4-7053-4985-831a-093ad13a9f45-kube-api-access-z9qx7\") on node \"crc\" DevicePath \"\"" Sep 30 10:08:35 crc kubenswrapper[4730]: I0930 10:08:35.518052 4730 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/bc4d7fc4-7053-4985-831a-093ad13a9f45-httpd-config\") on node \"crc\" DevicePath \"\"" Sep 30 10:08:35 crc kubenswrapper[4730]: I0930 10:08:35.518061 4730 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/bc4d7fc4-7053-4985-831a-093ad13a9f45-config\") on node \"crc\" DevicePath \"\"" Sep 30 10:08:35 crc kubenswrapper[4730]: I0930 10:08:35.518070 4730 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bc4d7fc4-7053-4985-831a-093ad13a9f45-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 10:08:35 crc kubenswrapper[4730]: I0930 10:08:35.518644 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f5ac3d92-1949-4134-9a3a-5af8b1f7e914-log-httpd\") pod \"ceilometer-0\" (UID: \"f5ac3d92-1949-4134-9a3a-5af8b1f7e914\") " pod="openstack/ceilometer-0" Sep 30 10:08:35 crc kubenswrapper[4730]: I0930 10:08:35.518825 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f5ac3d92-1949-4134-9a3a-5af8b1f7e914-run-httpd\") pod \"ceilometer-0\" (UID: \"f5ac3d92-1949-4134-9a3a-5af8b1f7e914\") " pod="openstack/ceilometer-0" Sep 30 10:08:35 crc kubenswrapper[4730]: I0930 10:08:35.520875 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f5ac3d92-1949-4134-9a3a-5af8b1f7e914-scripts\") pod \"ceilometer-0\" (UID: \"f5ac3d92-1949-4134-9a3a-5af8b1f7e914\") " pod="openstack/ceilometer-0" Sep 30 10:08:35 crc kubenswrapper[4730]: I0930 10:08:35.522244 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5ac3d92-1949-4134-9a3a-5af8b1f7e914-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f5ac3d92-1949-4134-9a3a-5af8b1f7e914\") " pod="openstack/ceilometer-0" Sep 30 10:08:35 crc kubenswrapper[4730]: I0930 10:08:35.522509 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f5ac3d92-1949-4134-9a3a-5af8b1f7e914-config-data\") pod \"ceilometer-0\" (UID: \"f5ac3d92-1949-4134-9a3a-5af8b1f7e914\") " pod="openstack/ceilometer-0" Sep 30 10:08:35 crc kubenswrapper[4730]: I0930 10:08:35.524070 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/f5ac3d92-1949-4134-9a3a-5af8b1f7e914-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"f5ac3d92-1949-4134-9a3a-5af8b1f7e914\") " pod="openstack/ceilometer-0" Sep 30 10:08:35 crc kubenswrapper[4730]: I0930 10:08:35.537817 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: 
\"kubernetes.io/secret/f5ac3d92-1949-4134-9a3a-5af8b1f7e914-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f5ac3d92-1949-4134-9a3a-5af8b1f7e914\") " pod="openstack/ceilometer-0" Sep 30 10:08:35 crc kubenswrapper[4730]: I0930 10:08:35.544168 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s6ljw\" (UniqueName: \"kubernetes.io/projected/f5ac3d92-1949-4134-9a3a-5af8b1f7e914-kube-api-access-s6ljw\") pod \"ceilometer-0\" (UID: \"f5ac3d92-1949-4134-9a3a-5af8b1f7e914\") " pod="openstack/ceilometer-0" Sep 30 10:08:35 crc kubenswrapper[4730]: I0930 10:08:35.599835 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc4d7fc4-7053-4985-831a-093ad13a9f45-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "bc4d7fc4-7053-4985-831a-093ad13a9f45" (UID: "bc4d7fc4-7053-4985-831a-093ad13a9f45"). InnerVolumeSpecName "ovndb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:08:35 crc kubenswrapper[4730]: I0930 10:08:35.620327 4730 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/bc4d7fc4-7053-4985-831a-093ad13a9f45-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 10:08:35 crc kubenswrapper[4730]: I0930 10:08:35.659719 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 10:08:36 crc kubenswrapper[4730]: I0930 10:08:36.205353 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 30 10:08:36 crc kubenswrapper[4730]: I0930 10:08:36.229165 4730 generic.go:334] "Generic (PLEG): container finished" podID="3a4ff200-9ad7-4a4b-afe8-4170071efc67" containerID="fac94bc3f86d8140ae61d87a6b36ac3c2a1b3b9138f78ddf9ea350a486d9b1bc" exitCode=0 Sep 30 10:08:36 crc kubenswrapper[4730]: I0930 10:08:36.229218 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-beff-account-create-w69vt" event={"ID":"3a4ff200-9ad7-4a4b-afe8-4170071efc67","Type":"ContainerDied","Data":"fac94bc3f86d8140ae61d87a6b36ac3c2a1b3b9138f78ddf9ea350a486d9b1bc"} Sep 30 10:08:36 crc kubenswrapper[4730]: I0930 10:08:36.233302 4730 generic.go:334] "Generic (PLEG): container finished" podID="7070bd97-620f-4ad8-b7c1-044902ae6857" containerID="4ec9ea9ccb81335a48818cd75925bbf3eff5c762c81e52bd38adfb2860ee7f80" exitCode=0 Sep 30 10:08:36 crc kubenswrapper[4730]: I0930 10:08:36.233383 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-5004-account-create-kzhh4" event={"ID":"7070bd97-620f-4ad8-b7c1-044902ae6857","Type":"ContainerDied","Data":"4ec9ea9ccb81335a48818cd75925bbf3eff5c762c81e52bd38adfb2860ee7f80"} Sep 30 10:08:36 crc kubenswrapper[4730]: I0930 10:08:36.234950 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f5ac3d92-1949-4134-9a3a-5af8b1f7e914","Type":"ContainerStarted","Data":"3af35b2475e4c295f647d78d2d290bc5a90dc9ce3eeaa351d28d7aa49fe2555d"} Sep 30 10:08:36 crc kubenswrapper[4730]: I0930 10:08:36.238014 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-f956cddd4-sbtc4" Sep 30 10:08:36 crc kubenswrapper[4730]: I0930 10:08:36.242702 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-f956cddd4-sbtc4" event={"ID":"bc4d7fc4-7053-4985-831a-093ad13a9f45","Type":"ContainerDied","Data":"116bdb07cc5d1cc17eb4a98e4aa51ba0f7d9a5d5461d6cf4cdcb647e26df66b6"} Sep 30 10:08:36 crc kubenswrapper[4730]: I0930 10:08:36.242767 4730 scope.go:117] "RemoveContainer" containerID="809eeb547f49f15792831ab80ae1ae4ab3eceb9263a950f4d87aaa5556072bf2" Sep 30 10:08:36 crc kubenswrapper[4730]: I0930 10:08:36.287480 4730 scope.go:117] "RemoveContainer" containerID="d7d350d869da1d66db5b82c41829cc173cb048a4a8c2e2f500f8b1dc0eaecd13" Sep 30 10:08:36 crc kubenswrapper[4730]: I0930 10:08:36.327763 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-f956cddd4-sbtc4"] Sep 30 10:08:36 crc kubenswrapper[4730]: I0930 10:08:36.346299 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-f956cddd4-sbtc4"] Sep 30 10:08:36 crc kubenswrapper[4730]: I0930 10:08:36.416793 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc4d7fc4-7053-4985-831a-093ad13a9f45" path="/var/lib/kubelet/pods/bc4d7fc4-7053-4985-831a-093ad13a9f45/volumes" Sep 30 10:08:36 crc kubenswrapper[4730]: I0930 10:08:36.417602 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d6985e94-3efc-48db-8690-cbf6a02d1047" path="/var/lib/kubelet/pods/d6985e94-3efc-48db-8690-cbf6a02d1047/volumes" Sep 30 10:08:36 crc kubenswrapper[4730]: I0930 10:08:36.906996 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-6770-account-create-7j85m" Sep 30 10:08:36 crc kubenswrapper[4730]: I0930 10:08:36.924929 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 30 10:08:36 crc kubenswrapper[4730]: I0930 10:08:36.958635 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qwlsb\" (UniqueName: \"kubernetes.io/projected/51b81319-5b51-4d86-8e0f-d5c955ea145b-kube-api-access-qwlsb\") pod \"51b81319-5b51-4d86-8e0f-d5c955ea145b\" (UID: \"51b81319-5b51-4d86-8e0f-d5c955ea145b\") " Sep 30 10:08:36 crc kubenswrapper[4730]: I0930 10:08:36.963120 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/51b81319-5b51-4d86-8e0f-d5c955ea145b-kube-api-access-qwlsb" (OuterVolumeSpecName: "kube-api-access-qwlsb") pod "51b81319-5b51-4d86-8e0f-d5c955ea145b" (UID: "51b81319-5b51-4d86-8e0f-d5c955ea145b"). InnerVolumeSpecName "kube-api-access-qwlsb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:08:37 crc kubenswrapper[4730]: I0930 10:08:37.061773 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qwlsb\" (UniqueName: \"kubernetes.io/projected/51b81319-5b51-4d86-8e0f-d5c955ea145b-kube-api-access-qwlsb\") on node \"crc\" DevicePath \"\"" Sep 30 10:08:37 crc kubenswrapper[4730]: I0930 10:08:37.258320 4730 generic.go:334] "Generic (PLEG): container finished" podID="7df8ec89-cd12-419e-83a2-ca0d04205f41" containerID="6b24b471a3235e8d01a54393e74a1e7397d0deab97d3c165bd9fdc5d5aa38e4c" exitCode=0 Sep 30 10:08:37 crc kubenswrapper[4730]: I0930 10:08:37.258663 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"7df8ec89-cd12-419e-83a2-ca0d04205f41","Type":"ContainerDied","Data":"6b24b471a3235e8d01a54393e74a1e7397d0deab97d3c165bd9fdc5d5aa38e4c"} Sep 30 10:08:37 crc kubenswrapper[4730]: I0930 10:08:37.261483 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-6770-account-create-7j85m" event={"ID":"51b81319-5b51-4d86-8e0f-d5c955ea145b","Type":"ContainerDied","Data":"afebca84ab6c1c1e3d8c0560b9c9208bb9e16eb312c71dc49966ba6dd995c6d4"} Sep 30 10:08:37 crc kubenswrapper[4730]: I0930 10:08:37.261541 4730 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="afebca84ab6c1c1e3d8c0560b9c9208bb9e16eb312c71dc49966ba6dd995c6d4" Sep 30 10:08:37 crc kubenswrapper[4730]: I0930 10:08:37.261662 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-6770-account-create-7j85m" Sep 30 10:08:37 crc kubenswrapper[4730]: I0930 10:08:37.304013 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f5ac3d92-1949-4134-9a3a-5af8b1f7e914","Type":"ContainerStarted","Data":"844f44e16d47ade238eaa16fbf705825e6fbec375c0482455fd5f3cea4f93115"} Sep 30 10:08:37 crc kubenswrapper[4730]: I0930 10:08:37.304183 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f5ac3d92-1949-4134-9a3a-5af8b1f7e914","Type":"ContainerStarted","Data":"69fe5c3ded1eca016dd4027e908051722cdaf6f52c516c8a1c417dc9f3641ad2"} Sep 30 10:08:37 crc kubenswrapper[4730]: I0930 10:08:37.974999 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Sep 30 10:08:37 crc kubenswrapper[4730]: I0930 10:08:37.981091 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-beff-account-create-w69vt" Sep 30 10:08:37 crc kubenswrapper[4730]: I0930 10:08:37.988756 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-5004-account-create-kzhh4" Sep 30 10:08:38 crc kubenswrapper[4730]: I0930 10:08:38.115857 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jggc6\" (UniqueName: \"kubernetes.io/projected/3a4ff200-9ad7-4a4b-afe8-4170071efc67-kube-api-access-jggc6\") pod \"3a4ff200-9ad7-4a4b-afe8-4170071efc67\" (UID: \"3a4ff200-9ad7-4a4b-afe8-4170071efc67\") " Sep 30 10:08:38 crc kubenswrapper[4730]: I0930 10:08:38.115955 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ww6w4\" (UniqueName: \"kubernetes.io/projected/7070bd97-620f-4ad8-b7c1-044902ae6857-kube-api-access-ww6w4\") pod \"7070bd97-620f-4ad8-b7c1-044902ae6857\" (UID: \"7070bd97-620f-4ad8-b7c1-044902ae6857\") " Sep 30 10:08:38 crc kubenswrapper[4730]: I0930 10:08:38.116027 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7df8ec89-cd12-419e-83a2-ca0d04205f41-combined-ca-bundle\") pod \"7df8ec89-cd12-419e-83a2-ca0d04205f41\" (UID: \"7df8ec89-cd12-419e-83a2-ca0d04205f41\") " Sep 30 10:08:38 crc kubenswrapper[4730]: I0930 10:08:38.116105 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7df8ec89-cd12-419e-83a2-ca0d04205f41-config-data-custom\") pod \"7df8ec89-cd12-419e-83a2-ca0d04205f41\" (UID: \"7df8ec89-cd12-419e-83a2-ca0d04205f41\") " Sep 30 10:08:38 crc kubenswrapper[4730]: I0930 10:08:38.116140 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7df8ec89-cd12-419e-83a2-ca0d04205f41-config-data\") pod \"7df8ec89-cd12-419e-83a2-ca0d04205f41\" (UID: \"7df8ec89-cd12-419e-83a2-ca0d04205f41\") " Sep 30 10:08:38 crc kubenswrapper[4730]: I0930 10:08:38.116169 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gwtq4\" (UniqueName: \"kubernetes.io/projected/7df8ec89-cd12-419e-83a2-ca0d04205f41-kube-api-access-gwtq4\") pod \"7df8ec89-cd12-419e-83a2-ca0d04205f41\" (UID: \"7df8ec89-cd12-419e-83a2-ca0d04205f41\") " Sep 30 10:08:38 crc kubenswrapper[4730]: I0930 10:08:38.116317 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7df8ec89-cd12-419e-83a2-ca0d04205f41-scripts\") pod \"7df8ec89-cd12-419e-83a2-ca0d04205f41\" (UID: \"7df8ec89-cd12-419e-83a2-ca0d04205f41\") " Sep 30 10:08:38 crc kubenswrapper[4730]: I0930 10:08:38.116356 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7df8ec89-cd12-419e-83a2-ca0d04205f41-etc-machine-id\") pod \"7df8ec89-cd12-419e-83a2-ca0d04205f41\" (UID: \"7df8ec89-cd12-419e-83a2-ca0d04205f41\") " Sep 30 10:08:38 crc kubenswrapper[4730]: I0930 10:08:38.116974 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7df8ec89-cd12-419e-83a2-ca0d04205f41-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "7df8ec89-cd12-419e-83a2-ca0d04205f41" (UID: "7df8ec89-cd12-419e-83a2-ca0d04205f41"). InnerVolumeSpecName "etc-machine-id". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 10:08:38 crc kubenswrapper[4730]: I0930 10:08:38.122818 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3a4ff200-9ad7-4a4b-afe8-4170071efc67-kube-api-access-jggc6" (OuterVolumeSpecName: "kube-api-access-jggc6") pod "3a4ff200-9ad7-4a4b-afe8-4170071efc67" (UID: "3a4ff200-9ad7-4a4b-afe8-4170071efc67"). InnerVolumeSpecName "kube-api-access-jggc6". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:08:38 crc kubenswrapper[4730]: I0930 10:08:38.123359 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7df8ec89-cd12-419e-83a2-ca0d04205f41-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "7df8ec89-cd12-419e-83a2-ca0d04205f41" (UID: "7df8ec89-cd12-419e-83a2-ca0d04205f41"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:08:38 crc kubenswrapper[4730]: I0930 10:08:38.124250 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7070bd97-620f-4ad8-b7c1-044902ae6857-kube-api-access-ww6w4" (OuterVolumeSpecName: "kube-api-access-ww6w4") pod "7070bd97-620f-4ad8-b7c1-044902ae6857" (UID: "7070bd97-620f-4ad8-b7c1-044902ae6857"). InnerVolumeSpecName "kube-api-access-ww6w4". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:08:38 crc kubenswrapper[4730]: I0930 10:08:38.124270 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7df8ec89-cd12-419e-83a2-ca0d04205f41-scripts" (OuterVolumeSpecName: "scripts") pod "7df8ec89-cd12-419e-83a2-ca0d04205f41" (UID: "7df8ec89-cd12-419e-83a2-ca0d04205f41"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:08:38 crc kubenswrapper[4730]: I0930 10:08:38.127494 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7df8ec89-cd12-419e-83a2-ca0d04205f41-kube-api-access-gwtq4" (OuterVolumeSpecName: "kube-api-access-gwtq4") pod "7df8ec89-cd12-419e-83a2-ca0d04205f41" (UID: "7df8ec89-cd12-419e-83a2-ca0d04205f41"). InnerVolumeSpecName "kube-api-access-gwtq4". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:08:38 crc kubenswrapper[4730]: I0930 10:08:38.185949 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7df8ec89-cd12-419e-83a2-ca0d04205f41-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7df8ec89-cd12-419e-83a2-ca0d04205f41" (UID: "7df8ec89-cd12-419e-83a2-ca0d04205f41"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:08:38 crc kubenswrapper[4730]: I0930 10:08:38.218763 4730 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7df8ec89-cd12-419e-83a2-ca0d04205f41-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 10:08:38 crc kubenswrapper[4730]: I0930 10:08:38.218801 4730 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7df8ec89-cd12-419e-83a2-ca0d04205f41-etc-machine-id\") on node \"crc\" DevicePath \"\"" Sep 30 10:08:38 crc kubenswrapper[4730]: I0930 10:08:38.218816 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jggc6\" (UniqueName: \"kubernetes.io/projected/3a4ff200-9ad7-4a4b-afe8-4170071efc67-kube-api-access-jggc6\") on node \"crc\" DevicePath \"\"" Sep 30 10:08:38 crc kubenswrapper[4730]: I0930 10:08:38.218831 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ww6w4\" (UniqueName: \"kubernetes.io/projected/7070bd97-620f-4ad8-b7c1-044902ae6857-kube-api-access-ww6w4\") on node \"crc\" DevicePath \"\"" Sep 30 10:08:38 crc kubenswrapper[4730]: I0930 10:08:38.218842 4730 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7df8ec89-cd12-419e-83a2-ca0d04205f41-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 10:08:38 crc kubenswrapper[4730]: I0930 10:08:38.218853 4730 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7df8ec89-cd12-419e-83a2-ca0d04205f41-config-data-custom\") on node \"crc\" DevicePath \"\"" Sep 30 10:08:38 crc kubenswrapper[4730]: I0930 10:08:38.218864 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gwtq4\" (UniqueName: \"kubernetes.io/projected/7df8ec89-cd12-419e-83a2-ca0d04205f41-kube-api-access-gwtq4\") on node \"crc\" DevicePath \"\"" Sep 30 10:08:38 crc kubenswrapper[4730]: I0930 10:08:38.228953 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7df8ec89-cd12-419e-83a2-ca0d04205f41-config-data" (OuterVolumeSpecName: "config-data") pod "7df8ec89-cd12-419e-83a2-ca0d04205f41" (UID: "7df8ec89-cd12-419e-83a2-ca0d04205f41"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:08:38 crc kubenswrapper[4730]: I0930 10:08:38.314205 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Sep 30 10:08:38 crc kubenswrapper[4730]: I0930 10:08:38.314234 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"7df8ec89-cd12-419e-83a2-ca0d04205f41","Type":"ContainerDied","Data":"a284e9a2d69cfbf635980e37d0e7c57c7f758b0b6240a1a5203c6eea32c3a953"} Sep 30 10:08:38 crc kubenswrapper[4730]: I0930 10:08:38.314302 4730 scope.go:117] "RemoveContainer" containerID="0b2b9d608b6600aa9fc53e605ebac1990a1916ee85de3e98c9592e5d2e3c6786" Sep 30 10:08:38 crc kubenswrapper[4730]: I0930 10:08:38.316036 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-beff-account-create-w69vt" event={"ID":"3a4ff200-9ad7-4a4b-afe8-4170071efc67","Type":"ContainerDied","Data":"4a0619d0a30194e3e5260b4c427ffae149a91731855ad50f13743a1902cb0fd3"} Sep 30 10:08:38 crc kubenswrapper[4730]: I0930 10:08:38.316073 4730 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4a0619d0a30194e3e5260b4c427ffae149a91731855ad50f13743a1902cb0fd3" Sep 30 10:08:38 crc kubenswrapper[4730]: I0930 10:08:38.316129 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-beff-account-create-w69vt" Sep 30 10:08:38 crc kubenswrapper[4730]: I0930 10:08:38.320879 4730 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7df8ec89-cd12-419e-83a2-ca0d04205f41-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 10:08:38 crc kubenswrapper[4730]: I0930 10:08:38.322784 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-5004-account-create-kzhh4" event={"ID":"7070bd97-620f-4ad8-b7c1-044902ae6857","Type":"ContainerDied","Data":"878cd673eacc91a733120a98aa617a9c978a69bd3c1808e58ea12d6969300336"} Sep 30 10:08:38 crc kubenswrapper[4730]: I0930 10:08:38.322821 4730 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="878cd673eacc91a733120a98aa617a9c978a69bd3c1808e58ea12d6969300336" Sep 30 10:08:38 crc kubenswrapper[4730]: I0930 10:08:38.322875 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-5004-account-create-kzhh4" Sep 30 10:08:38 crc kubenswrapper[4730]: I0930 10:08:38.333139 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f5ac3d92-1949-4134-9a3a-5af8b1f7e914","Type":"ContainerStarted","Data":"d9bfcc9619408e33498588ed815ff2fc611550d01c30b045483b4c15d4f42554"} Sep 30 10:08:38 crc kubenswrapper[4730]: I0930 10:08:38.342727 4730 scope.go:117] "RemoveContainer" containerID="6b24b471a3235e8d01a54393e74a1e7397d0deab97d3c165bd9fdc5d5aa38e4c" Sep 30 10:08:38 crc kubenswrapper[4730]: I0930 10:08:38.359308 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Sep 30 10:08:38 crc kubenswrapper[4730]: I0930 10:08:38.392580 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Sep 30 10:08:38 crc kubenswrapper[4730]: I0930 10:08:38.396940 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Sep 30 10:08:38 crc kubenswrapper[4730]: E0930 10:08:38.397444 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7df8ec89-cd12-419e-83a2-ca0d04205f41" containerName="cinder-scheduler" Sep 30 10:08:38 crc kubenswrapper[4730]: I0930 10:08:38.397473 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="7df8ec89-cd12-419e-83a2-ca0d04205f41" containerName="cinder-scheduler" Sep 30 10:08:38 crc kubenswrapper[4730]: E0930 10:08:38.397498 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bc4d7fc4-7053-4985-831a-093ad13a9f45" containerName="neutron-httpd" Sep 30 10:08:38 crc kubenswrapper[4730]: I0930 10:08:38.397506 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="bc4d7fc4-7053-4985-831a-093ad13a9f45" containerName="neutron-httpd" Sep 30 10:08:38 crc kubenswrapper[4730]: E0930 10:08:38.397524 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7070bd97-620f-4ad8-b7c1-044902ae6857" containerName="mariadb-account-create" Sep 30 10:08:38 crc kubenswrapper[4730]: I0930 10:08:38.397532 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="7070bd97-620f-4ad8-b7c1-044902ae6857" containerName="mariadb-account-create" Sep 30 10:08:38 crc kubenswrapper[4730]: E0930 10:08:38.397550 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3a4ff200-9ad7-4a4b-afe8-4170071efc67" containerName="mariadb-account-create" Sep 30 10:08:38 crc kubenswrapper[4730]: I0930 10:08:38.397559 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="3a4ff200-9ad7-4a4b-afe8-4170071efc67" containerName="mariadb-account-create" Sep 30 10:08:38 crc kubenswrapper[4730]: E0930 10:08:38.397570 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7df8ec89-cd12-419e-83a2-ca0d04205f41" containerName="probe" Sep 30 10:08:38 crc kubenswrapper[4730]: I0930 10:08:38.397579 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="7df8ec89-cd12-419e-83a2-ca0d04205f41" containerName="probe" Sep 30 10:08:38 crc kubenswrapper[4730]: E0930 10:08:38.397596 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bc4d7fc4-7053-4985-831a-093ad13a9f45" containerName="neutron-api" Sep 30 10:08:38 crc kubenswrapper[4730]: I0930 10:08:38.397623 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="bc4d7fc4-7053-4985-831a-093ad13a9f45" containerName="neutron-api" Sep 30 10:08:38 crc kubenswrapper[4730]: E0930 10:08:38.397658 4730 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="51b81319-5b51-4d86-8e0f-d5c955ea145b" containerName="mariadb-account-create" Sep 30 10:08:38 crc kubenswrapper[4730]: I0930 10:08:38.397675 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="51b81319-5b51-4d86-8e0f-d5c955ea145b" containerName="mariadb-account-create" Sep 30 10:08:38 crc kubenswrapper[4730]: I0930 10:08:38.397880 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="7df8ec89-cd12-419e-83a2-ca0d04205f41" containerName="probe" Sep 30 10:08:38 crc kubenswrapper[4730]: I0930 10:08:38.397894 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="7df8ec89-cd12-419e-83a2-ca0d04205f41" containerName="cinder-scheduler" Sep 30 10:08:38 crc kubenswrapper[4730]: I0930 10:08:38.397906 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="bc4d7fc4-7053-4985-831a-093ad13a9f45" containerName="neutron-httpd" Sep 30 10:08:38 crc kubenswrapper[4730]: I0930 10:08:38.397925 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="51b81319-5b51-4d86-8e0f-d5c955ea145b" containerName="mariadb-account-create" Sep 30 10:08:38 crc kubenswrapper[4730]: I0930 10:08:38.397937 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="7070bd97-620f-4ad8-b7c1-044902ae6857" containerName="mariadb-account-create" Sep 30 10:08:38 crc kubenswrapper[4730]: I0930 10:08:38.397949 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="3a4ff200-9ad7-4a4b-afe8-4170071efc67" containerName="mariadb-account-create" Sep 30 10:08:38 crc kubenswrapper[4730]: I0930 10:08:38.397968 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="bc4d7fc4-7053-4985-831a-093ad13a9f45" containerName="neutron-api" Sep 30 10:08:38 crc kubenswrapper[4730]: I0930 10:08:38.399147 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Sep 30 10:08:38 crc kubenswrapper[4730]: I0930 10:08:38.401786 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Sep 30 10:08:38 crc kubenswrapper[4730]: I0930 10:08:38.407653 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Sep 30 10:08:38 crc kubenswrapper[4730]: I0930 10:08:38.525642 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a7f343db-0bd4-4e21-ae2d-0b61a4aa0e09-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"a7f343db-0bd4-4e21-ae2d-0b61a4aa0e09\") " pod="openstack/cinder-scheduler-0" Sep 30 10:08:38 crc kubenswrapper[4730]: I0930 10:08:38.525683 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a7f343db-0bd4-4e21-ae2d-0b61a4aa0e09-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"a7f343db-0bd4-4e21-ae2d-0b61a4aa0e09\") " pod="openstack/cinder-scheduler-0" Sep 30 10:08:38 crc kubenswrapper[4730]: I0930 10:08:38.525732 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a7f343db-0bd4-4e21-ae2d-0b61a4aa0e09-config-data\") pod \"cinder-scheduler-0\" (UID: \"a7f343db-0bd4-4e21-ae2d-0b61a4aa0e09\") " pod="openstack/cinder-scheduler-0" Sep 30 10:08:38 crc kubenswrapper[4730]: I0930 10:08:38.525851 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a7f343db-0bd4-4e21-ae2d-0b61a4aa0e09-scripts\") pod \"cinder-scheduler-0\" (UID: \"a7f343db-0bd4-4e21-ae2d-0b61a4aa0e09\") " pod="openstack/cinder-scheduler-0" Sep 30 10:08:38 crc kubenswrapper[4730]: I0930 10:08:38.525929 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-frktc\" (UniqueName: \"kubernetes.io/projected/a7f343db-0bd4-4e21-ae2d-0b61a4aa0e09-kube-api-access-frktc\") pod \"cinder-scheduler-0\" (UID: \"a7f343db-0bd4-4e21-ae2d-0b61a4aa0e09\") " pod="openstack/cinder-scheduler-0" Sep 30 10:08:38 crc kubenswrapper[4730]: I0930 10:08:38.526068 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7f343db-0bd4-4e21-ae2d-0b61a4aa0e09-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"a7f343db-0bd4-4e21-ae2d-0b61a4aa0e09\") " pod="openstack/cinder-scheduler-0" Sep 30 10:08:38 crc kubenswrapper[4730]: I0930 10:08:38.627861 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a7f343db-0bd4-4e21-ae2d-0b61a4aa0e09-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"a7f343db-0bd4-4e21-ae2d-0b61a4aa0e09\") " pod="openstack/cinder-scheduler-0" Sep 30 10:08:38 crc kubenswrapper[4730]: I0930 10:08:38.628153 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a7f343db-0bd4-4e21-ae2d-0b61a4aa0e09-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"a7f343db-0bd4-4e21-ae2d-0b61a4aa0e09\") " pod="openstack/cinder-scheduler-0" Sep 30 10:08:38 crc kubenswrapper[4730]: I0930 10:08:38.628291 4730 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a7f343db-0bd4-4e21-ae2d-0b61a4aa0e09-config-data\") pod \"cinder-scheduler-0\" (UID: \"a7f343db-0bd4-4e21-ae2d-0b61a4aa0e09\") " pod="openstack/cinder-scheduler-0" Sep 30 10:08:38 crc kubenswrapper[4730]: I0930 10:08:38.628400 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a7f343db-0bd4-4e21-ae2d-0b61a4aa0e09-scripts\") pod \"cinder-scheduler-0\" (UID: \"a7f343db-0bd4-4e21-ae2d-0b61a4aa0e09\") " pod="openstack/cinder-scheduler-0" Sep 30 10:08:38 crc kubenswrapper[4730]: I0930 10:08:38.628003 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a7f343db-0bd4-4e21-ae2d-0b61a4aa0e09-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"a7f343db-0bd4-4e21-ae2d-0b61a4aa0e09\") " pod="openstack/cinder-scheduler-0" Sep 30 10:08:38 crc kubenswrapper[4730]: I0930 10:08:38.628521 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-frktc\" (UniqueName: \"kubernetes.io/projected/a7f343db-0bd4-4e21-ae2d-0b61a4aa0e09-kube-api-access-frktc\") pod \"cinder-scheduler-0\" (UID: \"a7f343db-0bd4-4e21-ae2d-0b61a4aa0e09\") " pod="openstack/cinder-scheduler-0" Sep 30 10:08:38 crc kubenswrapper[4730]: I0930 10:08:38.628801 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7f343db-0bd4-4e21-ae2d-0b61a4aa0e09-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"a7f343db-0bd4-4e21-ae2d-0b61a4aa0e09\") " pod="openstack/cinder-scheduler-0" Sep 30 10:08:38 crc kubenswrapper[4730]: I0930 10:08:38.632325 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a7f343db-0bd4-4e21-ae2d-0b61a4aa0e09-scripts\") pod \"cinder-scheduler-0\" (UID: \"a7f343db-0bd4-4e21-ae2d-0b61a4aa0e09\") " pod="openstack/cinder-scheduler-0" Sep 30 10:08:38 crc kubenswrapper[4730]: I0930 10:08:38.632329 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a7f343db-0bd4-4e21-ae2d-0b61a4aa0e09-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"a7f343db-0bd4-4e21-ae2d-0b61a4aa0e09\") " pod="openstack/cinder-scheduler-0" Sep 30 10:08:38 crc kubenswrapper[4730]: I0930 10:08:38.633373 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7f343db-0bd4-4e21-ae2d-0b61a4aa0e09-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"a7f343db-0bd4-4e21-ae2d-0b61a4aa0e09\") " pod="openstack/cinder-scheduler-0" Sep 30 10:08:38 crc kubenswrapper[4730]: I0930 10:08:38.635442 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a7f343db-0bd4-4e21-ae2d-0b61a4aa0e09-config-data\") pod \"cinder-scheduler-0\" (UID: \"a7f343db-0bd4-4e21-ae2d-0b61a4aa0e09\") " pod="openstack/cinder-scheduler-0" Sep 30 10:08:38 crc kubenswrapper[4730]: I0930 10:08:38.652375 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-frktc\" (UniqueName: \"kubernetes.io/projected/a7f343db-0bd4-4e21-ae2d-0b61a4aa0e09-kube-api-access-frktc\") pod \"cinder-scheduler-0\" (UID: \"a7f343db-0bd4-4e21-ae2d-0b61a4aa0e09\") " pod="openstack/cinder-scheduler-0" Sep 30 10:08:38 
crc kubenswrapper[4730]: I0930 10:08:38.719816 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Sep 30 10:08:39 crc kubenswrapper[4730]: I0930 10:08:39.244078 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Sep 30 10:08:39 crc kubenswrapper[4730]: I0930 10:08:39.346082 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"a7f343db-0bd4-4e21-ae2d-0b61a4aa0e09","Type":"ContainerStarted","Data":"453a11dbdcce24f3b20ab476be3687fada6350727bca44466a9507eb90594849"} Sep 30 10:08:39 crc kubenswrapper[4730]: I0930 10:08:39.351122 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f5ac3d92-1949-4134-9a3a-5af8b1f7e914","Type":"ContainerStarted","Data":"613f433bc3cf1bdae9ee73f9ee0e198e04c95729522f6ca786579e739a7afab1"} Sep 30 10:08:39 crc kubenswrapper[4730]: I0930 10:08:39.351247 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f5ac3d92-1949-4134-9a3a-5af8b1f7e914" containerName="ceilometer-central-agent" containerID="cri-o://69fe5c3ded1eca016dd4027e908051722cdaf6f52c516c8a1c417dc9f3641ad2" gracePeriod=30 Sep 30 10:08:39 crc kubenswrapper[4730]: I0930 10:08:39.351349 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f5ac3d92-1949-4134-9a3a-5af8b1f7e914" containerName="proxy-httpd" containerID="cri-o://613f433bc3cf1bdae9ee73f9ee0e198e04c95729522f6ca786579e739a7afab1" gracePeriod=30 Sep 30 10:08:39 crc kubenswrapper[4730]: I0930 10:08:39.351405 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f5ac3d92-1949-4134-9a3a-5af8b1f7e914" containerName="sg-core" containerID="cri-o://d9bfcc9619408e33498588ed815ff2fc611550d01c30b045483b4c15d4f42554" gracePeriod=30 Sep 30 10:08:39 crc kubenswrapper[4730]: I0930 10:08:39.351436 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f5ac3d92-1949-4134-9a3a-5af8b1f7e914" containerName="ceilometer-notification-agent" containerID="cri-o://844f44e16d47ade238eaa16fbf705825e6fbec375c0482455fd5f3cea4f93115" gracePeriod=30 Sep 30 10:08:39 crc kubenswrapper[4730]: I0930 10:08:39.351776 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Sep 30 10:08:39 crc kubenswrapper[4730]: I0930 10:08:39.396256 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.509961821 podStartE2EDuration="4.396238164s" podCreationTimestamp="2025-09-30 10:08:35 +0000 UTC" firstStartedPulling="2025-09-30 10:08:36.206829156 +0000 UTC m=+1160.540089149" lastFinishedPulling="2025-09-30 10:08:39.093105499 +0000 UTC m=+1163.426365492" observedRunningTime="2025-09-30 10:08:39.378428278 +0000 UTC m=+1163.711688271" watchObservedRunningTime="2025-09-30 10:08:39.396238164 +0000 UTC m=+1163.729498157" Sep 30 10:08:39 crc kubenswrapper[4730]: I0930 10:08:39.484926 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Sep 30 10:08:40 crc kubenswrapper[4730]: I0930 10:08:40.363172 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" 
event={"ID":"a7f343db-0bd4-4e21-ae2d-0b61a4aa0e09","Type":"ContainerStarted","Data":"8b6723935208490b8081f570f931848aac5edefac0992c28684815302e88ae0b"} Sep 30 10:08:40 crc kubenswrapper[4730]: I0930 10:08:40.371725 4730 generic.go:334] "Generic (PLEG): container finished" podID="f5ac3d92-1949-4134-9a3a-5af8b1f7e914" containerID="d9bfcc9619408e33498588ed815ff2fc611550d01c30b045483b4c15d4f42554" exitCode=2 Sep 30 10:08:40 crc kubenswrapper[4730]: I0930 10:08:40.371761 4730 generic.go:334] "Generic (PLEG): container finished" podID="f5ac3d92-1949-4134-9a3a-5af8b1f7e914" containerID="844f44e16d47ade238eaa16fbf705825e6fbec375c0482455fd5f3cea4f93115" exitCode=0 Sep 30 10:08:40 crc kubenswrapper[4730]: I0930 10:08:40.371783 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f5ac3d92-1949-4134-9a3a-5af8b1f7e914","Type":"ContainerDied","Data":"d9bfcc9619408e33498588ed815ff2fc611550d01c30b045483b4c15d4f42554"} Sep 30 10:08:40 crc kubenswrapper[4730]: I0930 10:08:40.371808 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f5ac3d92-1949-4134-9a3a-5af8b1f7e914","Type":"ContainerDied","Data":"844f44e16d47ade238eaa16fbf705825e6fbec375c0482455fd5f3cea4f93115"} Sep 30 10:08:40 crc kubenswrapper[4730]: I0930 10:08:40.400277 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7df8ec89-cd12-419e-83a2-ca0d04205f41" path="/var/lib/kubelet/pods/7df8ec89-cd12-419e-83a2-ca0d04205f41/volumes" Sep 30 10:08:41 crc kubenswrapper[4730]: I0930 10:08:41.386835 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"a7f343db-0bd4-4e21-ae2d-0b61a4aa0e09","Type":"ContainerStarted","Data":"706cfa4c1d209538203f60bea3b5fccf8407bd4e49373af942df6c56611e0758"} Sep 30 10:08:41 crc kubenswrapper[4730]: I0930 10:08:41.392437 4730 generic.go:334] "Generic (PLEG): container finished" podID="f5ac3d92-1949-4134-9a3a-5af8b1f7e914" containerID="69fe5c3ded1eca016dd4027e908051722cdaf6f52c516c8a1c417dc9f3641ad2" exitCode=0 Sep 30 10:08:41 crc kubenswrapper[4730]: I0930 10:08:41.392486 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f5ac3d92-1949-4134-9a3a-5af8b1f7e914","Type":"ContainerDied","Data":"69fe5c3ded1eca016dd4027e908051722cdaf6f52c516c8a1c417dc9f3641ad2"} Sep 30 10:08:41 crc kubenswrapper[4730]: I0930 10:08:41.410289 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=3.410255349 podStartE2EDuration="3.410255349s" podCreationTimestamp="2025-09-30 10:08:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 10:08:41.405120036 +0000 UTC m=+1165.738380029" watchObservedRunningTime="2025-09-30 10:08:41.410255349 +0000 UTC m=+1165.743515342" Sep 30 10:08:41 crc kubenswrapper[4730]: I0930 10:08:41.599176 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Sep 30 10:08:43 crc kubenswrapper[4730]: I0930 10:08:43.720800 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Sep 30 10:08:44 crc kubenswrapper[4730]: I0930 10:08:44.266732 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-db-sync-f7qp5"] Sep 30 10:08:44 crc kubenswrapper[4730]: I0930 10:08:44.267959 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-f7qp5" Sep 30 10:08:44 crc kubenswrapper[4730]: I0930 10:08:44.270367 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-cfjds" Sep 30 10:08:44 crc kubenswrapper[4730]: I0930 10:08:44.270367 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-scripts" Sep 30 10:08:44 crc kubenswrapper[4730]: I0930 10:08:44.270461 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Sep 30 10:08:44 crc kubenswrapper[4730]: I0930 10:08:44.279470 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-f7qp5"] Sep 30 10:08:44 crc kubenswrapper[4730]: I0930 10:08:44.338539 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/baa411cd-87b0-4467-af6d-9a64df6f75b9-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-f7qp5\" (UID: \"baa411cd-87b0-4467-af6d-9a64df6f75b9\") " pod="openstack/nova-cell0-conductor-db-sync-f7qp5" Sep 30 10:08:44 crc kubenswrapper[4730]: I0930 10:08:44.338594 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/baa411cd-87b0-4467-af6d-9a64df6f75b9-config-data\") pod \"nova-cell0-conductor-db-sync-f7qp5\" (UID: \"baa411cd-87b0-4467-af6d-9a64df6f75b9\") " pod="openstack/nova-cell0-conductor-db-sync-f7qp5" Sep 30 10:08:44 crc kubenswrapper[4730]: I0930 10:08:44.338718 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bbvwh\" (UniqueName: \"kubernetes.io/projected/baa411cd-87b0-4467-af6d-9a64df6f75b9-kube-api-access-bbvwh\") pod \"nova-cell0-conductor-db-sync-f7qp5\" (UID: \"baa411cd-87b0-4467-af6d-9a64df6f75b9\") " pod="openstack/nova-cell0-conductor-db-sync-f7qp5" Sep 30 10:08:44 crc kubenswrapper[4730]: I0930 10:08:44.338750 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/baa411cd-87b0-4467-af6d-9a64df6f75b9-scripts\") pod \"nova-cell0-conductor-db-sync-f7qp5\" (UID: \"baa411cd-87b0-4467-af6d-9a64df6f75b9\") " pod="openstack/nova-cell0-conductor-db-sync-f7qp5" Sep 30 10:08:44 crc kubenswrapper[4730]: I0930 10:08:44.381549 4730 scope.go:117] "RemoveContainer" containerID="767915a10d018b40c18d12dc3c1d3b41568181e4b1ea08a89ae112a458629c45" Sep 30 10:08:44 crc kubenswrapper[4730]: E0930 10:08:44.381883 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"watcher-decision-engine\" with CrashLoopBackOff: \"back-off 20s restarting failed container=watcher-decision-engine pod=watcher-decision-engine-0_openstack(a4f9bd21-5f86-4443-87be-eadb5d1c77f9)\"" pod="openstack/watcher-decision-engine-0" podUID="a4f9bd21-5f86-4443-87be-eadb5d1c77f9" Sep 30 10:08:44 crc kubenswrapper[4730]: I0930 10:08:44.440431 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/baa411cd-87b0-4467-af6d-9a64df6f75b9-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-f7qp5\" (UID: \"baa411cd-87b0-4467-af6d-9a64df6f75b9\") " pod="openstack/nova-cell0-conductor-db-sync-f7qp5" Sep 30 10:08:44 crc kubenswrapper[4730]: I0930 10:08:44.440555 4730 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/baa411cd-87b0-4467-af6d-9a64df6f75b9-config-data\") pod \"nova-cell0-conductor-db-sync-f7qp5\" (UID: \"baa411cd-87b0-4467-af6d-9a64df6f75b9\") " pod="openstack/nova-cell0-conductor-db-sync-f7qp5" Sep 30 10:08:44 crc kubenswrapper[4730]: I0930 10:08:44.440712 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bbvwh\" (UniqueName: \"kubernetes.io/projected/baa411cd-87b0-4467-af6d-9a64df6f75b9-kube-api-access-bbvwh\") pod \"nova-cell0-conductor-db-sync-f7qp5\" (UID: \"baa411cd-87b0-4467-af6d-9a64df6f75b9\") " pod="openstack/nova-cell0-conductor-db-sync-f7qp5" Sep 30 10:08:44 crc kubenswrapper[4730]: I0930 10:08:44.440763 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/baa411cd-87b0-4467-af6d-9a64df6f75b9-scripts\") pod \"nova-cell0-conductor-db-sync-f7qp5\" (UID: \"baa411cd-87b0-4467-af6d-9a64df6f75b9\") " pod="openstack/nova-cell0-conductor-db-sync-f7qp5" Sep 30 10:08:44 crc kubenswrapper[4730]: I0930 10:08:44.448545 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/baa411cd-87b0-4467-af6d-9a64df6f75b9-scripts\") pod \"nova-cell0-conductor-db-sync-f7qp5\" (UID: \"baa411cd-87b0-4467-af6d-9a64df6f75b9\") " pod="openstack/nova-cell0-conductor-db-sync-f7qp5" Sep 30 10:08:44 crc kubenswrapper[4730]: I0930 10:08:44.464311 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/baa411cd-87b0-4467-af6d-9a64df6f75b9-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-f7qp5\" (UID: \"baa411cd-87b0-4467-af6d-9a64df6f75b9\") " pod="openstack/nova-cell0-conductor-db-sync-f7qp5" Sep 30 10:08:44 crc kubenswrapper[4730]: I0930 10:08:44.469475 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/baa411cd-87b0-4467-af6d-9a64df6f75b9-config-data\") pod \"nova-cell0-conductor-db-sync-f7qp5\" (UID: \"baa411cd-87b0-4467-af6d-9a64df6f75b9\") " pod="openstack/nova-cell0-conductor-db-sync-f7qp5" Sep 30 10:08:44 crc kubenswrapper[4730]: I0930 10:08:44.483299 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bbvwh\" (UniqueName: \"kubernetes.io/projected/baa411cd-87b0-4467-af6d-9a64df6f75b9-kube-api-access-bbvwh\") pod \"nova-cell0-conductor-db-sync-f7qp5\" (UID: \"baa411cd-87b0-4467-af6d-9a64df6f75b9\") " pod="openstack/nova-cell0-conductor-db-sync-f7qp5" Sep 30 10:08:44 crc kubenswrapper[4730]: I0930 10:08:44.607097 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-f7qp5" Sep 30 10:08:45 crc kubenswrapper[4730]: I0930 10:08:45.154422 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-f7qp5"] Sep 30 10:08:45 crc kubenswrapper[4730]: I0930 10:08:45.439462 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-f7qp5" event={"ID":"baa411cd-87b0-4467-af6d-9a64df6f75b9","Type":"ContainerStarted","Data":"d0461d8b348e749b9f000889e777bf06e4e0573407815f3f628c5a4ae7427ad8"} Sep 30 10:08:48 crc kubenswrapper[4730]: I0930 10:08:48.871302 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Sep 30 10:08:52 crc kubenswrapper[4730]: I0930 10:08:52.993578 4730 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack/watcher-decision-engine-0" Sep 30 10:08:52 crc kubenswrapper[4730]: I0930 10:08:52.994175 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/watcher-decision-engine-0" Sep 30 10:08:52 crc kubenswrapper[4730]: I0930 10:08:52.994978 4730 scope.go:117] "RemoveContainer" containerID="767915a10d018b40c18d12dc3c1d3b41568181e4b1ea08a89ae112a458629c45" Sep 30 10:08:54 crc kubenswrapper[4730]: I0930 10:08:54.547560 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/watcher-decision-engine-0" event={"ID":"a4f9bd21-5f86-4443-87be-eadb5d1c77f9","Type":"ContainerStarted","Data":"def8857b664de3faf290800834b691e7c3c9a4a867d7dfe819559b5b39c6ad74"} Sep 30 10:08:57 crc kubenswrapper[4730]: I0930 10:08:57.580838 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-f7qp5" event={"ID":"baa411cd-87b0-4467-af6d-9a64df6f75b9","Type":"ContainerStarted","Data":"70a5681709d25a447d57a454fe9ae0deb302be92897371ffa057cbc9c6e37114"} Sep 30 10:08:57 crc kubenswrapper[4730]: I0930 10:08:57.611603 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-db-sync-f7qp5" podStartSLOduration=1.80387582 podStartE2EDuration="13.611582158s" podCreationTimestamp="2025-09-30 10:08:44 +0000 UTC" firstStartedPulling="2025-09-30 10:08:45.169010293 +0000 UTC m=+1169.502270286" lastFinishedPulling="2025-09-30 10:08:56.976716631 +0000 UTC m=+1181.309976624" observedRunningTime="2025-09-30 10:08:57.605072668 +0000 UTC m=+1181.938332671" watchObservedRunningTime="2025-09-30 10:08:57.611582158 +0000 UTC m=+1181.944842161" Sep 30 10:09:02 crc kubenswrapper[4730]: I0930 10:09:02.993472 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/watcher-decision-engine-0" Sep 30 10:09:03 crc kubenswrapper[4730]: I0930 10:09:03.027826 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/watcher-decision-engine-0" Sep 30 10:09:03 crc kubenswrapper[4730]: I0930 10:09:03.641921 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/watcher-decision-engine-0" Sep 30 10:09:03 crc kubenswrapper[4730]: I0930 10:09:03.668196 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/watcher-decision-engine-0" Sep 30 10:09:05 crc kubenswrapper[4730]: I0930 10:09:05.670127 4730 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="f5ac3d92-1949-4134-9a3a-5af8b1f7e914" containerName="proxy-httpd" probeResult="failure" output="HTTP probe failed with statuscode: 503" Sep 30 
10:09:09 crc kubenswrapper[4730]: I0930 10:09:09.703891 4730 generic.go:334] "Generic (PLEG): container finished" podID="f5ac3d92-1949-4134-9a3a-5af8b1f7e914" containerID="613f433bc3cf1bdae9ee73f9ee0e198e04c95729522f6ca786579e739a7afab1" exitCode=137 Sep 30 10:09:09 crc kubenswrapper[4730]: I0930 10:09:09.703981 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f5ac3d92-1949-4134-9a3a-5af8b1f7e914","Type":"ContainerDied","Data":"613f433bc3cf1bdae9ee73f9ee0e198e04c95729522f6ca786579e739a7afab1"} Sep 30 10:09:10 crc kubenswrapper[4730]: I0930 10:09:10.316749 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 10:09:10 crc kubenswrapper[4730]: I0930 10:09:10.396963 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f5ac3d92-1949-4134-9a3a-5af8b1f7e914-scripts\") pod \"f5ac3d92-1949-4134-9a3a-5af8b1f7e914\" (UID: \"f5ac3d92-1949-4134-9a3a-5af8b1f7e914\") " Sep 30 10:09:10 crc kubenswrapper[4730]: I0930 10:09:10.397040 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s6ljw\" (UniqueName: \"kubernetes.io/projected/f5ac3d92-1949-4134-9a3a-5af8b1f7e914-kube-api-access-s6ljw\") pod \"f5ac3d92-1949-4134-9a3a-5af8b1f7e914\" (UID: \"f5ac3d92-1949-4134-9a3a-5af8b1f7e914\") " Sep 30 10:09:10 crc kubenswrapper[4730]: I0930 10:09:10.397117 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f5ac3d92-1949-4134-9a3a-5af8b1f7e914-sg-core-conf-yaml\") pod \"f5ac3d92-1949-4134-9a3a-5af8b1f7e914\" (UID: \"f5ac3d92-1949-4134-9a3a-5af8b1f7e914\") " Sep 30 10:09:10 crc kubenswrapper[4730]: I0930 10:09:10.397166 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f5ac3d92-1949-4134-9a3a-5af8b1f7e914-config-data\") pod \"f5ac3d92-1949-4134-9a3a-5af8b1f7e914\" (UID: \"f5ac3d92-1949-4134-9a3a-5af8b1f7e914\") " Sep 30 10:09:10 crc kubenswrapper[4730]: I0930 10:09:10.397276 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f5ac3d92-1949-4134-9a3a-5af8b1f7e914-run-httpd\") pod \"f5ac3d92-1949-4134-9a3a-5af8b1f7e914\" (UID: \"f5ac3d92-1949-4134-9a3a-5af8b1f7e914\") " Sep 30 10:09:10 crc kubenswrapper[4730]: I0930 10:09:10.397296 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5ac3d92-1949-4134-9a3a-5af8b1f7e914-combined-ca-bundle\") pod \"f5ac3d92-1949-4134-9a3a-5af8b1f7e914\" (UID: \"f5ac3d92-1949-4134-9a3a-5af8b1f7e914\") " Sep 30 10:09:10 crc kubenswrapper[4730]: I0930 10:09:10.397318 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/f5ac3d92-1949-4134-9a3a-5af8b1f7e914-ceilometer-tls-certs\") pod \"f5ac3d92-1949-4134-9a3a-5af8b1f7e914\" (UID: \"f5ac3d92-1949-4134-9a3a-5af8b1f7e914\") " Sep 30 10:09:10 crc kubenswrapper[4730]: I0930 10:09:10.397358 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f5ac3d92-1949-4134-9a3a-5af8b1f7e914-log-httpd\") pod \"f5ac3d92-1949-4134-9a3a-5af8b1f7e914\" (UID: \"f5ac3d92-1949-4134-9a3a-5af8b1f7e914\") " Sep 30 10:09:10 
crc kubenswrapper[4730]: I0930 10:09:10.397667 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f5ac3d92-1949-4134-9a3a-5af8b1f7e914-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "f5ac3d92-1949-4134-9a3a-5af8b1f7e914" (UID: "f5ac3d92-1949-4134-9a3a-5af8b1f7e914"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 10:09:10 crc kubenswrapper[4730]: I0930 10:09:10.397806 4730 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f5ac3d92-1949-4134-9a3a-5af8b1f7e914-run-httpd\") on node \"crc\" DevicePath \"\"" Sep 30 10:09:10 crc kubenswrapper[4730]: I0930 10:09:10.398818 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f5ac3d92-1949-4134-9a3a-5af8b1f7e914-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "f5ac3d92-1949-4134-9a3a-5af8b1f7e914" (UID: "f5ac3d92-1949-4134-9a3a-5af8b1f7e914"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 10:09:10 crc kubenswrapper[4730]: I0930 10:09:10.413868 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f5ac3d92-1949-4134-9a3a-5af8b1f7e914-scripts" (OuterVolumeSpecName: "scripts") pod "f5ac3d92-1949-4134-9a3a-5af8b1f7e914" (UID: "f5ac3d92-1949-4134-9a3a-5af8b1f7e914"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:09:10 crc kubenswrapper[4730]: I0930 10:09:10.417120 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f5ac3d92-1949-4134-9a3a-5af8b1f7e914-kube-api-access-s6ljw" (OuterVolumeSpecName: "kube-api-access-s6ljw") pod "f5ac3d92-1949-4134-9a3a-5af8b1f7e914" (UID: "f5ac3d92-1949-4134-9a3a-5af8b1f7e914"). InnerVolumeSpecName "kube-api-access-s6ljw". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:09:10 crc kubenswrapper[4730]: I0930 10:09:10.440322 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f5ac3d92-1949-4134-9a3a-5af8b1f7e914-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "f5ac3d92-1949-4134-9a3a-5af8b1f7e914" (UID: "f5ac3d92-1949-4134-9a3a-5af8b1f7e914"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:09:10 crc kubenswrapper[4730]: I0930 10:09:10.459730 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f5ac3d92-1949-4134-9a3a-5af8b1f7e914-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "f5ac3d92-1949-4134-9a3a-5af8b1f7e914" (UID: "f5ac3d92-1949-4134-9a3a-5af8b1f7e914"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:09:10 crc kubenswrapper[4730]: I0930 10:09:10.472894 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f5ac3d92-1949-4134-9a3a-5af8b1f7e914-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f5ac3d92-1949-4134-9a3a-5af8b1f7e914" (UID: "f5ac3d92-1949-4134-9a3a-5af8b1f7e914"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:09:10 crc kubenswrapper[4730]: I0930 10:09:10.500528 4730 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5ac3d92-1949-4134-9a3a-5af8b1f7e914-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 10:09:10 crc kubenswrapper[4730]: I0930 10:09:10.500564 4730 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/f5ac3d92-1949-4134-9a3a-5af8b1f7e914-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 10:09:10 crc kubenswrapper[4730]: I0930 10:09:10.501868 4730 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f5ac3d92-1949-4134-9a3a-5af8b1f7e914-log-httpd\") on node \"crc\" DevicePath \"\"" Sep 30 10:09:10 crc kubenswrapper[4730]: I0930 10:09:10.501884 4730 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f5ac3d92-1949-4134-9a3a-5af8b1f7e914-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 10:09:10 crc kubenswrapper[4730]: I0930 10:09:10.501897 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s6ljw\" (UniqueName: \"kubernetes.io/projected/f5ac3d92-1949-4134-9a3a-5af8b1f7e914-kube-api-access-s6ljw\") on node \"crc\" DevicePath \"\"" Sep 30 10:09:10 crc kubenswrapper[4730]: I0930 10:09:10.501910 4730 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f5ac3d92-1949-4134-9a3a-5af8b1f7e914-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Sep 30 10:09:10 crc kubenswrapper[4730]: I0930 10:09:10.504594 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f5ac3d92-1949-4134-9a3a-5af8b1f7e914-config-data" (OuterVolumeSpecName: "config-data") pod "f5ac3d92-1949-4134-9a3a-5af8b1f7e914" (UID: "f5ac3d92-1949-4134-9a3a-5af8b1f7e914"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:09:10 crc kubenswrapper[4730]: I0930 10:09:10.604414 4730 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f5ac3d92-1949-4134-9a3a-5af8b1f7e914-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 10:09:10 crc kubenswrapper[4730]: I0930 10:09:10.716884 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f5ac3d92-1949-4134-9a3a-5af8b1f7e914","Type":"ContainerDied","Data":"3af35b2475e4c295f647d78d2d290bc5a90dc9ce3eeaa351d28d7aa49fe2555d"} Sep 30 10:09:10 crc kubenswrapper[4730]: I0930 10:09:10.716953 4730 scope.go:117] "RemoveContainer" containerID="613f433bc3cf1bdae9ee73f9ee0e198e04c95729522f6ca786579e739a7afab1" Sep 30 10:09:10 crc kubenswrapper[4730]: I0930 10:09:10.716999 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 30 10:09:10 crc kubenswrapper[4730]: I0930 10:09:10.752549 4730 scope.go:117] "RemoveContainer" containerID="d9bfcc9619408e33498588ed815ff2fc611550d01c30b045483b4c15d4f42554" Sep 30 10:09:10 crc kubenswrapper[4730]: I0930 10:09:10.786075 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 30 10:09:10 crc kubenswrapper[4730]: I0930 10:09:10.804217 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Sep 30 10:09:10 crc kubenswrapper[4730]: I0930 10:09:10.809593 4730 scope.go:117] "RemoveContainer" containerID="844f44e16d47ade238eaa16fbf705825e6fbec375c0482455fd5f3cea4f93115" Sep 30 10:09:10 crc kubenswrapper[4730]: I0930 10:09:10.823811 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Sep 30 10:09:10 crc kubenswrapper[4730]: E0930 10:09:10.824192 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f5ac3d92-1949-4134-9a3a-5af8b1f7e914" containerName="sg-core" Sep 30 10:09:10 crc kubenswrapper[4730]: I0930 10:09:10.824210 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="f5ac3d92-1949-4134-9a3a-5af8b1f7e914" containerName="sg-core" Sep 30 10:09:10 crc kubenswrapper[4730]: E0930 10:09:10.824226 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f5ac3d92-1949-4134-9a3a-5af8b1f7e914" containerName="ceilometer-central-agent" Sep 30 10:09:10 crc kubenswrapper[4730]: I0930 10:09:10.824232 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="f5ac3d92-1949-4134-9a3a-5af8b1f7e914" containerName="ceilometer-central-agent" Sep 30 10:09:10 crc kubenswrapper[4730]: E0930 10:09:10.824255 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f5ac3d92-1949-4134-9a3a-5af8b1f7e914" containerName="ceilometer-notification-agent" Sep 30 10:09:10 crc kubenswrapper[4730]: I0930 10:09:10.824261 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="f5ac3d92-1949-4134-9a3a-5af8b1f7e914" containerName="ceilometer-notification-agent" Sep 30 10:09:10 crc kubenswrapper[4730]: E0930 10:09:10.824275 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f5ac3d92-1949-4134-9a3a-5af8b1f7e914" containerName="proxy-httpd" Sep 30 10:09:10 crc kubenswrapper[4730]: I0930 10:09:10.824281 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="f5ac3d92-1949-4134-9a3a-5af8b1f7e914" containerName="proxy-httpd" Sep 30 10:09:10 crc kubenswrapper[4730]: I0930 10:09:10.824446 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="f5ac3d92-1949-4134-9a3a-5af8b1f7e914" containerName="sg-core" Sep 30 10:09:10 crc kubenswrapper[4730]: I0930 10:09:10.824459 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="f5ac3d92-1949-4134-9a3a-5af8b1f7e914" containerName="ceilometer-notification-agent" Sep 30 10:09:10 crc kubenswrapper[4730]: I0930 10:09:10.824470 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="f5ac3d92-1949-4134-9a3a-5af8b1f7e914" containerName="proxy-httpd" Sep 30 10:09:10 crc kubenswrapper[4730]: I0930 10:09:10.824481 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="f5ac3d92-1949-4134-9a3a-5af8b1f7e914" containerName="ceilometer-central-agent" Sep 30 10:09:10 crc kubenswrapper[4730]: I0930 10:09:10.827061 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 30 10:09:10 crc kubenswrapper[4730]: I0930 10:09:10.831403 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Sep 30 10:09:10 crc kubenswrapper[4730]: I0930 10:09:10.831496 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Sep 30 10:09:10 crc kubenswrapper[4730]: I0930 10:09:10.832057 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Sep 30 10:09:10 crc kubenswrapper[4730]: I0930 10:09:10.833543 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 30 10:09:10 crc kubenswrapper[4730]: I0930 10:09:10.841365 4730 scope.go:117] "RemoveContainer" containerID="69fe5c3ded1eca016dd4027e908051722cdaf6f52c516c8a1c417dc9f3641ad2" Sep 30 10:09:10 crc kubenswrapper[4730]: I0930 10:09:10.909919 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tcmbh\" (UniqueName: \"kubernetes.io/projected/994beb56-6886-40ad-88f9-98e66828cafd-kube-api-access-tcmbh\") pod \"ceilometer-0\" (UID: \"994beb56-6886-40ad-88f9-98e66828cafd\") " pod="openstack/ceilometer-0" Sep 30 10:09:10 crc kubenswrapper[4730]: I0930 10:09:10.910486 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/994beb56-6886-40ad-88f9-98e66828cafd-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"994beb56-6886-40ad-88f9-98e66828cafd\") " pod="openstack/ceilometer-0" Sep 30 10:09:10 crc kubenswrapper[4730]: I0930 10:09:10.910686 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/994beb56-6886-40ad-88f9-98e66828cafd-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"994beb56-6886-40ad-88f9-98e66828cafd\") " pod="openstack/ceilometer-0" Sep 30 10:09:10 crc kubenswrapper[4730]: I0930 10:09:10.910816 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/994beb56-6886-40ad-88f9-98e66828cafd-run-httpd\") pod \"ceilometer-0\" (UID: \"994beb56-6886-40ad-88f9-98e66828cafd\") " pod="openstack/ceilometer-0" Sep 30 10:09:10 crc kubenswrapper[4730]: I0930 10:09:10.910994 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/994beb56-6886-40ad-88f9-98e66828cafd-scripts\") pod \"ceilometer-0\" (UID: \"994beb56-6886-40ad-88f9-98e66828cafd\") " pod="openstack/ceilometer-0" Sep 30 10:09:10 crc kubenswrapper[4730]: I0930 10:09:10.911166 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/994beb56-6886-40ad-88f9-98e66828cafd-config-data\") pod \"ceilometer-0\" (UID: \"994beb56-6886-40ad-88f9-98e66828cafd\") " pod="openstack/ceilometer-0" Sep 30 10:09:10 crc kubenswrapper[4730]: I0930 10:09:10.911364 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/994beb56-6886-40ad-88f9-98e66828cafd-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"994beb56-6886-40ad-88f9-98e66828cafd\") " pod="openstack/ceilometer-0" Sep 30 10:09:10 crc kubenswrapper[4730]: 
I0930 10:09:10.911512 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/994beb56-6886-40ad-88f9-98e66828cafd-log-httpd\") pod \"ceilometer-0\" (UID: \"994beb56-6886-40ad-88f9-98e66828cafd\") " pod="openstack/ceilometer-0" Sep 30 10:09:11 crc kubenswrapper[4730]: I0930 10:09:11.012740 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/994beb56-6886-40ad-88f9-98e66828cafd-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"994beb56-6886-40ad-88f9-98e66828cafd\") " pod="openstack/ceilometer-0" Sep 30 10:09:11 crc kubenswrapper[4730]: I0930 10:09:11.012989 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/994beb56-6886-40ad-88f9-98e66828cafd-log-httpd\") pod \"ceilometer-0\" (UID: \"994beb56-6886-40ad-88f9-98e66828cafd\") " pod="openstack/ceilometer-0" Sep 30 10:09:11 crc kubenswrapper[4730]: I0930 10:09:11.013140 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tcmbh\" (UniqueName: \"kubernetes.io/projected/994beb56-6886-40ad-88f9-98e66828cafd-kube-api-access-tcmbh\") pod \"ceilometer-0\" (UID: \"994beb56-6886-40ad-88f9-98e66828cafd\") " pod="openstack/ceilometer-0" Sep 30 10:09:11 crc kubenswrapper[4730]: I0930 10:09:11.013230 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/994beb56-6886-40ad-88f9-98e66828cafd-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"994beb56-6886-40ad-88f9-98e66828cafd\") " pod="openstack/ceilometer-0" Sep 30 10:09:11 crc kubenswrapper[4730]: I0930 10:09:11.013330 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/994beb56-6886-40ad-88f9-98e66828cafd-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"994beb56-6886-40ad-88f9-98e66828cafd\") " pod="openstack/ceilometer-0" Sep 30 10:09:11 crc kubenswrapper[4730]: I0930 10:09:11.013412 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/994beb56-6886-40ad-88f9-98e66828cafd-run-httpd\") pod \"ceilometer-0\" (UID: \"994beb56-6886-40ad-88f9-98e66828cafd\") " pod="openstack/ceilometer-0" Sep 30 10:09:11 crc kubenswrapper[4730]: I0930 10:09:11.013488 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/994beb56-6886-40ad-88f9-98e66828cafd-scripts\") pod \"ceilometer-0\" (UID: \"994beb56-6886-40ad-88f9-98e66828cafd\") " pod="openstack/ceilometer-0" Sep 30 10:09:11 crc kubenswrapper[4730]: I0930 10:09:11.013562 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/994beb56-6886-40ad-88f9-98e66828cafd-config-data\") pod \"ceilometer-0\" (UID: \"994beb56-6886-40ad-88f9-98e66828cafd\") " pod="openstack/ceilometer-0" Sep 30 10:09:11 crc kubenswrapper[4730]: I0930 10:09:11.013809 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/994beb56-6886-40ad-88f9-98e66828cafd-log-httpd\") pod \"ceilometer-0\" (UID: \"994beb56-6886-40ad-88f9-98e66828cafd\") " pod="openstack/ceilometer-0" Sep 30 10:09:11 crc kubenswrapper[4730]: I0930 
10:09:11.013878 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/994beb56-6886-40ad-88f9-98e66828cafd-run-httpd\") pod \"ceilometer-0\" (UID: \"994beb56-6886-40ad-88f9-98e66828cafd\") " pod="openstack/ceilometer-0" Sep 30 10:09:11 crc kubenswrapper[4730]: I0930 10:09:11.018110 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/994beb56-6886-40ad-88f9-98e66828cafd-scripts\") pod \"ceilometer-0\" (UID: \"994beb56-6886-40ad-88f9-98e66828cafd\") " pod="openstack/ceilometer-0" Sep 30 10:09:11 crc kubenswrapper[4730]: I0930 10:09:11.018163 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/994beb56-6886-40ad-88f9-98e66828cafd-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"994beb56-6886-40ad-88f9-98e66828cafd\") " pod="openstack/ceilometer-0" Sep 30 10:09:11 crc kubenswrapper[4730]: I0930 10:09:11.018526 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/994beb56-6886-40ad-88f9-98e66828cafd-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"994beb56-6886-40ad-88f9-98e66828cafd\") " pod="openstack/ceilometer-0" Sep 30 10:09:11 crc kubenswrapper[4730]: I0930 10:09:11.027897 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/994beb56-6886-40ad-88f9-98e66828cafd-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"994beb56-6886-40ad-88f9-98e66828cafd\") " pod="openstack/ceilometer-0" Sep 30 10:09:11 crc kubenswrapper[4730]: I0930 10:09:11.034340 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tcmbh\" (UniqueName: \"kubernetes.io/projected/994beb56-6886-40ad-88f9-98e66828cafd-kube-api-access-tcmbh\") pod \"ceilometer-0\" (UID: \"994beb56-6886-40ad-88f9-98e66828cafd\") " pod="openstack/ceilometer-0" Sep 30 10:09:11 crc kubenswrapper[4730]: I0930 10:09:11.040767 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/994beb56-6886-40ad-88f9-98e66828cafd-config-data\") pod \"ceilometer-0\" (UID: \"994beb56-6886-40ad-88f9-98e66828cafd\") " pod="openstack/ceilometer-0" Sep 30 10:09:11 crc kubenswrapper[4730]: I0930 10:09:11.155396 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 30 10:09:11 crc kubenswrapper[4730]: I0930 10:09:11.642514 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 30 10:09:11 crc kubenswrapper[4730]: W0930 10:09:11.654648 4730 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod994beb56_6886_40ad_88f9_98e66828cafd.slice/crio-621737f982e8b581cc91b698cfbf5fbae1ccd0deef2c4bd6066bb48d9a736d0f WatchSource:0}: Error finding container 621737f982e8b581cc91b698cfbf5fbae1ccd0deef2c4bd6066bb48d9a736d0f: Status 404 returned error can't find the container with id 621737f982e8b581cc91b698cfbf5fbae1ccd0deef2c4bd6066bb48d9a736d0f Sep 30 10:09:11 crc kubenswrapper[4730]: I0930 10:09:11.726583 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"994beb56-6886-40ad-88f9-98e66828cafd","Type":"ContainerStarted","Data":"621737f982e8b581cc91b698cfbf5fbae1ccd0deef2c4bd6066bb48d9a736d0f"} Sep 30 10:09:12 crc kubenswrapper[4730]: I0930 10:09:12.393117 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f5ac3d92-1949-4134-9a3a-5af8b1f7e914" path="/var/lib/kubelet/pods/f5ac3d92-1949-4134-9a3a-5af8b1f7e914/volumes" Sep 30 10:09:12 crc kubenswrapper[4730]: I0930 10:09:12.746697 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"994beb56-6886-40ad-88f9-98e66828cafd","Type":"ContainerStarted","Data":"26c3f8acd99c58dd7c7ffa2e39768b3752aa2e2e49987dcdbe3d46a3d76d528b"} Sep 30 10:09:12 crc kubenswrapper[4730]: I0930 10:09:12.746739 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"994beb56-6886-40ad-88f9-98e66828cafd","Type":"ContainerStarted","Data":"f314f1f1ee72b0424cc3d973d3a7162d6cab86d40721a9a6d85d1bf05ee6225d"} Sep 30 10:09:13 crc kubenswrapper[4730]: I0930 10:09:13.763795 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"994beb56-6886-40ad-88f9-98e66828cafd","Type":"ContainerStarted","Data":"7ed151c7840c898d7d2eb7e101da3a438c14fec51d25187a09a03713b50ca041"} Sep 30 10:09:14 crc kubenswrapper[4730]: I0930 10:09:14.772258 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"994beb56-6886-40ad-88f9-98e66828cafd","Type":"ContainerStarted","Data":"761137ad28d52bd9509b460a9ac82ad2ccb2003086aa456fd838e6219e521dac"} Sep 30 10:09:14 crc kubenswrapper[4730]: I0930 10:09:14.773929 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Sep 30 10:09:29 crc kubenswrapper[4730]: I0930 10:09:29.914180 4730 generic.go:334] "Generic (PLEG): container finished" podID="baa411cd-87b0-4467-af6d-9a64df6f75b9" containerID="70a5681709d25a447d57a454fe9ae0deb302be92897371ffa057cbc9c6e37114" exitCode=0 Sep 30 10:09:29 crc kubenswrapper[4730]: I0930 10:09:29.914283 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-f7qp5" event={"ID":"baa411cd-87b0-4467-af6d-9a64df6f75b9","Type":"ContainerDied","Data":"70a5681709d25a447d57a454fe9ae0deb302be92897371ffa057cbc9c6e37114"} Sep 30 10:09:29 crc kubenswrapper[4730]: I0930 10:09:29.934488 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=17.227194175 podStartE2EDuration="19.934467976s" podCreationTimestamp="2025-09-30 10:09:10 +0000 UTC" firstStartedPulling="2025-09-30 
10:09:11.657251804 +0000 UTC m=+1195.990511797" lastFinishedPulling="2025-09-30 10:09:14.364525605 +0000 UTC m=+1198.697785598" observedRunningTime="2025-09-30 10:09:14.796157852 +0000 UTC m=+1199.129417845" watchObservedRunningTime="2025-09-30 10:09:29.934467976 +0000 UTC m=+1214.267727969" Sep 30 10:09:31 crc kubenswrapper[4730]: I0930 10:09:31.392676 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-f7qp5" Sep 30 10:09:31 crc kubenswrapper[4730]: I0930 10:09:31.506874 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/baa411cd-87b0-4467-af6d-9a64df6f75b9-config-data\") pod \"baa411cd-87b0-4467-af6d-9a64df6f75b9\" (UID: \"baa411cd-87b0-4467-af6d-9a64df6f75b9\") " Sep 30 10:09:31 crc kubenswrapper[4730]: I0930 10:09:31.506973 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/baa411cd-87b0-4467-af6d-9a64df6f75b9-scripts\") pod \"baa411cd-87b0-4467-af6d-9a64df6f75b9\" (UID: \"baa411cd-87b0-4467-af6d-9a64df6f75b9\") " Sep 30 10:09:31 crc kubenswrapper[4730]: I0930 10:09:31.507013 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bbvwh\" (UniqueName: \"kubernetes.io/projected/baa411cd-87b0-4467-af6d-9a64df6f75b9-kube-api-access-bbvwh\") pod \"baa411cd-87b0-4467-af6d-9a64df6f75b9\" (UID: \"baa411cd-87b0-4467-af6d-9a64df6f75b9\") " Sep 30 10:09:31 crc kubenswrapper[4730]: I0930 10:09:31.507067 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/baa411cd-87b0-4467-af6d-9a64df6f75b9-combined-ca-bundle\") pod \"baa411cd-87b0-4467-af6d-9a64df6f75b9\" (UID: \"baa411cd-87b0-4467-af6d-9a64df6f75b9\") " Sep 30 10:09:31 crc kubenswrapper[4730]: I0930 10:09:31.517910 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/baa411cd-87b0-4467-af6d-9a64df6f75b9-kube-api-access-bbvwh" (OuterVolumeSpecName: "kube-api-access-bbvwh") pod "baa411cd-87b0-4467-af6d-9a64df6f75b9" (UID: "baa411cd-87b0-4467-af6d-9a64df6f75b9"). InnerVolumeSpecName "kube-api-access-bbvwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:09:31 crc kubenswrapper[4730]: I0930 10:09:31.524053 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/baa411cd-87b0-4467-af6d-9a64df6f75b9-scripts" (OuterVolumeSpecName: "scripts") pod "baa411cd-87b0-4467-af6d-9a64df6f75b9" (UID: "baa411cd-87b0-4467-af6d-9a64df6f75b9"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:09:31 crc kubenswrapper[4730]: I0930 10:09:31.535902 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/baa411cd-87b0-4467-af6d-9a64df6f75b9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "baa411cd-87b0-4467-af6d-9a64df6f75b9" (UID: "baa411cd-87b0-4467-af6d-9a64df6f75b9"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:09:31 crc kubenswrapper[4730]: I0930 10:09:31.538206 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/baa411cd-87b0-4467-af6d-9a64df6f75b9-config-data" (OuterVolumeSpecName: "config-data") pod "baa411cd-87b0-4467-af6d-9a64df6f75b9" (UID: "baa411cd-87b0-4467-af6d-9a64df6f75b9"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:09:31 crc kubenswrapper[4730]: I0930 10:09:31.609143 4730 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/baa411cd-87b0-4467-af6d-9a64df6f75b9-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 10:09:31 crc kubenswrapper[4730]: I0930 10:09:31.609215 4730 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/baa411cd-87b0-4467-af6d-9a64df6f75b9-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 10:09:31 crc kubenswrapper[4730]: I0930 10:09:31.609229 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bbvwh\" (UniqueName: \"kubernetes.io/projected/baa411cd-87b0-4467-af6d-9a64df6f75b9-kube-api-access-bbvwh\") on node \"crc\" DevicePath \"\"" Sep 30 10:09:31 crc kubenswrapper[4730]: I0930 10:09:31.609244 4730 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/baa411cd-87b0-4467-af6d-9a64df6f75b9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 10:09:31 crc kubenswrapper[4730]: I0930 10:09:31.931300 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-f7qp5" event={"ID":"baa411cd-87b0-4467-af6d-9a64df6f75b9","Type":"ContainerDied","Data":"d0461d8b348e749b9f000889e777bf06e4e0573407815f3f628c5a4ae7427ad8"} Sep 30 10:09:31 crc kubenswrapper[4730]: I0930 10:09:31.931343 4730 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d0461d8b348e749b9f000889e777bf06e4e0573407815f3f628c5a4ae7427ad8" Sep 30 10:09:31 crc kubenswrapper[4730]: I0930 10:09:31.931778 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-f7qp5" Sep 30 10:09:32 crc kubenswrapper[4730]: I0930 10:09:32.072831 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Sep 30 10:09:32 crc kubenswrapper[4730]: E0930 10:09:32.073214 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="baa411cd-87b0-4467-af6d-9a64df6f75b9" containerName="nova-cell0-conductor-db-sync" Sep 30 10:09:32 crc kubenswrapper[4730]: I0930 10:09:32.073229 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="baa411cd-87b0-4467-af6d-9a64df6f75b9" containerName="nova-cell0-conductor-db-sync" Sep 30 10:09:32 crc kubenswrapper[4730]: I0930 10:09:32.073397 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="baa411cd-87b0-4467-af6d-9a64df6f75b9" containerName="nova-cell0-conductor-db-sync" Sep 30 10:09:32 crc kubenswrapper[4730]: I0930 10:09:32.074041 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Sep 30 10:09:32 crc kubenswrapper[4730]: I0930 10:09:32.076957 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-cfjds" Sep 30 10:09:32 crc kubenswrapper[4730]: I0930 10:09:32.077200 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Sep 30 10:09:32 crc kubenswrapper[4730]: I0930 10:09:32.090894 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Sep 30 10:09:32 crc kubenswrapper[4730]: I0930 10:09:32.220699 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6dt87\" (UniqueName: \"kubernetes.io/projected/4b30e43c-c603-4ad2-a95b-8a8a1ac5a46a-kube-api-access-6dt87\") pod \"nova-cell0-conductor-0\" (UID: \"4b30e43c-c603-4ad2-a95b-8a8a1ac5a46a\") " pod="openstack/nova-cell0-conductor-0" Sep 30 10:09:32 crc kubenswrapper[4730]: I0930 10:09:32.220858 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4b30e43c-c603-4ad2-a95b-8a8a1ac5a46a-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"4b30e43c-c603-4ad2-a95b-8a8a1ac5a46a\") " pod="openstack/nova-cell0-conductor-0" Sep 30 10:09:32 crc kubenswrapper[4730]: I0930 10:09:32.220899 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4b30e43c-c603-4ad2-a95b-8a8a1ac5a46a-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"4b30e43c-c603-4ad2-a95b-8a8a1ac5a46a\") " pod="openstack/nova-cell0-conductor-0" Sep 30 10:09:32 crc kubenswrapper[4730]: I0930 10:09:32.322426 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4b30e43c-c603-4ad2-a95b-8a8a1ac5a46a-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"4b30e43c-c603-4ad2-a95b-8a8a1ac5a46a\") " pod="openstack/nova-cell0-conductor-0" Sep 30 10:09:32 crc kubenswrapper[4730]: I0930 10:09:32.322487 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4b30e43c-c603-4ad2-a95b-8a8a1ac5a46a-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"4b30e43c-c603-4ad2-a95b-8a8a1ac5a46a\") " pod="openstack/nova-cell0-conductor-0" Sep 30 10:09:32 crc kubenswrapper[4730]: I0930 10:09:32.322520 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6dt87\" (UniqueName: \"kubernetes.io/projected/4b30e43c-c603-4ad2-a95b-8a8a1ac5a46a-kube-api-access-6dt87\") pod \"nova-cell0-conductor-0\" (UID: \"4b30e43c-c603-4ad2-a95b-8a8a1ac5a46a\") " pod="openstack/nova-cell0-conductor-0" Sep 30 10:09:32 crc kubenswrapper[4730]: I0930 10:09:32.327692 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4b30e43c-c603-4ad2-a95b-8a8a1ac5a46a-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"4b30e43c-c603-4ad2-a95b-8a8a1ac5a46a\") " pod="openstack/nova-cell0-conductor-0" Sep 30 10:09:32 crc kubenswrapper[4730]: I0930 10:09:32.332184 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4b30e43c-c603-4ad2-a95b-8a8a1ac5a46a-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" 
(UID: \"4b30e43c-c603-4ad2-a95b-8a8a1ac5a46a\") " pod="openstack/nova-cell0-conductor-0" Sep 30 10:09:32 crc kubenswrapper[4730]: I0930 10:09:32.338612 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6dt87\" (UniqueName: \"kubernetes.io/projected/4b30e43c-c603-4ad2-a95b-8a8a1ac5a46a-kube-api-access-6dt87\") pod \"nova-cell0-conductor-0\" (UID: \"4b30e43c-c603-4ad2-a95b-8a8a1ac5a46a\") " pod="openstack/nova-cell0-conductor-0" Sep 30 10:09:32 crc kubenswrapper[4730]: I0930 10:09:32.390632 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Sep 30 10:09:32 crc kubenswrapper[4730]: I0930 10:09:32.822489 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Sep 30 10:09:32 crc kubenswrapper[4730]: W0930 10:09:32.834030 4730 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4b30e43c_c603_4ad2_a95b_8a8a1ac5a46a.slice/crio-b2b71ed04ece244158dabd2e4528c26e435f190ff16ee0416319d42cc936ebe8 WatchSource:0}: Error finding container b2b71ed04ece244158dabd2e4528c26e435f190ff16ee0416319d42cc936ebe8: Status 404 returned error can't find the container with id b2b71ed04ece244158dabd2e4528c26e435f190ff16ee0416319d42cc936ebe8 Sep 30 10:09:32 crc kubenswrapper[4730]: I0930 10:09:32.940702 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"4b30e43c-c603-4ad2-a95b-8a8a1ac5a46a","Type":"ContainerStarted","Data":"b2b71ed04ece244158dabd2e4528c26e435f190ff16ee0416319d42cc936ebe8"} Sep 30 10:09:33 crc kubenswrapper[4730]: I0930 10:09:33.950840 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"4b30e43c-c603-4ad2-a95b-8a8a1ac5a46a","Type":"ContainerStarted","Data":"6c2d1f0450b917ce45709ddc1c02293e84f53dd80f9fcd1f8d6b198ca272ad09"} Sep 30 10:09:33 crc kubenswrapper[4730]: I0930 10:09:33.951277 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Sep 30 10:09:33 crc kubenswrapper[4730]: I0930 10:09:33.978862 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=1.9788344740000001 podStartE2EDuration="1.978834474s" podCreationTimestamp="2025-09-30 10:09:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 10:09:33.971622376 +0000 UTC m=+1218.304882399" watchObservedRunningTime="2025-09-30 10:09:33.978834474 +0000 UTC m=+1218.312094467" Sep 30 10:09:37 crc kubenswrapper[4730]: I0930 10:09:37.417737 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0" Sep 30 10:09:37 crc kubenswrapper[4730]: I0930 10:09:37.858342 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-cell-mapping-6psh5"] Sep 30 10:09:37 crc kubenswrapper[4730]: I0930 10:09:37.859534 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-6psh5" Sep 30 10:09:37 crc kubenswrapper[4730]: I0930 10:09:37.861649 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-scripts" Sep 30 10:09:37 crc kubenswrapper[4730]: I0930 10:09:37.862768 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-config-data" Sep 30 10:09:37 crc kubenswrapper[4730]: I0930 10:09:37.874281 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-6psh5"] Sep 30 10:09:37 crc kubenswrapper[4730]: I0930 10:09:37.927438 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/075c7eb9-74a6-49ca-a4b1-3fccba0ec354-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-6psh5\" (UID: \"075c7eb9-74a6-49ca-a4b1-3fccba0ec354\") " pod="openstack/nova-cell0-cell-mapping-6psh5" Sep 30 10:09:37 crc kubenswrapper[4730]: I0930 10:09:37.927537 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/075c7eb9-74a6-49ca-a4b1-3fccba0ec354-scripts\") pod \"nova-cell0-cell-mapping-6psh5\" (UID: \"075c7eb9-74a6-49ca-a4b1-3fccba0ec354\") " pod="openstack/nova-cell0-cell-mapping-6psh5" Sep 30 10:09:37 crc kubenswrapper[4730]: I0930 10:09:37.927576 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/075c7eb9-74a6-49ca-a4b1-3fccba0ec354-config-data\") pod \"nova-cell0-cell-mapping-6psh5\" (UID: \"075c7eb9-74a6-49ca-a4b1-3fccba0ec354\") " pod="openstack/nova-cell0-cell-mapping-6psh5" Sep 30 10:09:37 crc kubenswrapper[4730]: I0930 10:09:37.927764 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-94s6x\" (UniqueName: \"kubernetes.io/projected/075c7eb9-74a6-49ca-a4b1-3fccba0ec354-kube-api-access-94s6x\") pod \"nova-cell0-cell-mapping-6psh5\" (UID: \"075c7eb9-74a6-49ca-a4b1-3fccba0ec354\") " pod="openstack/nova-cell0-cell-mapping-6psh5" Sep 30 10:09:38 crc kubenswrapper[4730]: I0930 10:09:38.029207 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-94s6x\" (UniqueName: \"kubernetes.io/projected/075c7eb9-74a6-49ca-a4b1-3fccba0ec354-kube-api-access-94s6x\") pod \"nova-cell0-cell-mapping-6psh5\" (UID: \"075c7eb9-74a6-49ca-a4b1-3fccba0ec354\") " pod="openstack/nova-cell0-cell-mapping-6psh5" Sep 30 10:09:38 crc kubenswrapper[4730]: I0930 10:09:38.029653 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/075c7eb9-74a6-49ca-a4b1-3fccba0ec354-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-6psh5\" (UID: \"075c7eb9-74a6-49ca-a4b1-3fccba0ec354\") " pod="openstack/nova-cell0-cell-mapping-6psh5" Sep 30 10:09:38 crc kubenswrapper[4730]: I0930 10:09:38.029697 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/075c7eb9-74a6-49ca-a4b1-3fccba0ec354-scripts\") pod \"nova-cell0-cell-mapping-6psh5\" (UID: \"075c7eb9-74a6-49ca-a4b1-3fccba0ec354\") " pod="openstack/nova-cell0-cell-mapping-6psh5" Sep 30 10:09:38 crc kubenswrapper[4730]: I0930 10:09:38.029737 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" 
(UniqueName: \"kubernetes.io/secret/075c7eb9-74a6-49ca-a4b1-3fccba0ec354-config-data\") pod \"nova-cell0-cell-mapping-6psh5\" (UID: \"075c7eb9-74a6-49ca-a4b1-3fccba0ec354\") " pod="openstack/nova-cell0-cell-mapping-6psh5" Sep 30 10:09:38 crc kubenswrapper[4730]: I0930 10:09:38.034647 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/075c7eb9-74a6-49ca-a4b1-3fccba0ec354-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-6psh5\" (UID: \"075c7eb9-74a6-49ca-a4b1-3fccba0ec354\") " pod="openstack/nova-cell0-cell-mapping-6psh5" Sep 30 10:09:38 crc kubenswrapper[4730]: I0930 10:09:38.035680 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/075c7eb9-74a6-49ca-a4b1-3fccba0ec354-scripts\") pod \"nova-cell0-cell-mapping-6psh5\" (UID: \"075c7eb9-74a6-49ca-a4b1-3fccba0ec354\") " pod="openstack/nova-cell0-cell-mapping-6psh5" Sep 30 10:09:38 crc kubenswrapper[4730]: I0930 10:09:38.038648 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/075c7eb9-74a6-49ca-a4b1-3fccba0ec354-config-data\") pod \"nova-cell0-cell-mapping-6psh5\" (UID: \"075c7eb9-74a6-49ca-a4b1-3fccba0ec354\") " pod="openstack/nova-cell0-cell-mapping-6psh5" Sep 30 10:09:38 crc kubenswrapper[4730]: I0930 10:09:38.072958 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 30 10:09:38 crc kubenswrapper[4730]: I0930 10:09:38.074433 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Sep 30 10:09:38 crc kubenswrapper[4730]: I0930 10:09:38.078952 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Sep 30 10:09:38 crc kubenswrapper[4730]: I0930 10:09:38.083442 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-94s6x\" (UniqueName: \"kubernetes.io/projected/075c7eb9-74a6-49ca-a4b1-3fccba0ec354-kube-api-access-94s6x\") pod \"nova-cell0-cell-mapping-6psh5\" (UID: \"075c7eb9-74a6-49ca-a4b1-3fccba0ec354\") " pod="openstack/nova-cell0-cell-mapping-6psh5" Sep 30 10:09:38 crc kubenswrapper[4730]: I0930 10:09:38.100721 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 30 10:09:38 crc kubenswrapper[4730]: I0930 10:09:38.196873 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-6psh5" Sep 30 10:09:38 crc kubenswrapper[4730]: I0930 10:09:38.234390 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a5263576-d080-4153-b198-1eb7caaebf06-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"a5263576-d080-4153-b198-1eb7caaebf06\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 10:09:38 crc kubenswrapper[4730]: I0930 10:09:38.234506 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fnpvl\" (UniqueName: \"kubernetes.io/projected/a5263576-d080-4153-b198-1eb7caaebf06-kube-api-access-fnpvl\") pod \"nova-cell1-novncproxy-0\" (UID: \"a5263576-d080-4153-b198-1eb7caaebf06\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 10:09:38 crc kubenswrapper[4730]: I0930 10:09:38.234551 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a5263576-d080-4153-b198-1eb7caaebf06-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"a5263576-d080-4153-b198-1eb7caaebf06\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 10:09:38 crc kubenswrapper[4730]: I0930 10:09:38.235432 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Sep 30 10:09:38 crc kubenswrapper[4730]: I0930 10:09:38.237386 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Sep 30 10:09:38 crc kubenswrapper[4730]: I0930 10:09:38.240491 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Sep 30 10:09:38 crc kubenswrapper[4730]: I0930 10:09:38.269607 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 10:09:38 crc kubenswrapper[4730]: I0930 10:09:38.274468 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Sep 30 10:09:38 crc kubenswrapper[4730]: I0930 10:09:38.300740 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Sep 30 10:09:38 crc kubenswrapper[4730]: I0930 10:09:38.336407 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f54f86f6-ab4f-4a8a-85e2-14af8e9cfe50-logs\") pod \"nova-api-0\" (UID: \"f54f86f6-ab4f-4a8a-85e2-14af8e9cfe50\") " pod="openstack/nova-api-0" Sep 30 10:09:38 crc kubenswrapper[4730]: I0930 10:09:38.336462 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fnpvl\" (UniqueName: \"kubernetes.io/projected/a5263576-d080-4153-b198-1eb7caaebf06-kube-api-access-fnpvl\") pod \"nova-cell1-novncproxy-0\" (UID: \"a5263576-d080-4153-b198-1eb7caaebf06\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 10:09:38 crc kubenswrapper[4730]: I0930 10:09:38.336503 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a5263576-d080-4153-b198-1eb7caaebf06-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"a5263576-d080-4153-b198-1eb7caaebf06\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 10:09:38 crc kubenswrapper[4730]: I0930 10:09:38.336535 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/56117495-4103-4c6b-a1ea-079e61844032-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"56117495-4103-4c6b-a1ea-079e61844032\") " pod="openstack/nova-scheduler-0" Sep 30 10:09:38 crc kubenswrapper[4730]: I0930 10:09:38.336642 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f54f86f6-ab4f-4a8a-85e2-14af8e9cfe50-config-data\") pod \"nova-api-0\" (UID: \"f54f86f6-ab4f-4a8a-85e2-14af8e9cfe50\") " pod="openstack/nova-api-0" Sep 30 10:09:38 crc kubenswrapper[4730]: I0930 10:09:38.336732 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f54f86f6-ab4f-4a8a-85e2-14af8e9cfe50-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"f54f86f6-ab4f-4a8a-85e2-14af8e9cfe50\") " pod="openstack/nova-api-0" Sep 30 10:09:38 crc kubenswrapper[4730]: I0930 10:09:38.336755 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7ztll\" (UniqueName: \"kubernetes.io/projected/f54f86f6-ab4f-4a8a-85e2-14af8e9cfe50-kube-api-access-7ztll\") pod \"nova-api-0\" (UID: \"f54f86f6-ab4f-4a8a-85e2-14af8e9cfe50\") " pod="openstack/nova-api-0" Sep 30 10:09:38 crc kubenswrapper[4730]: I0930 10:09:38.336798 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a5263576-d080-4153-b198-1eb7caaebf06-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"a5263576-d080-4153-b198-1eb7caaebf06\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 10:09:38 crc kubenswrapper[4730]: I0930 10:09:38.336838 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/56117495-4103-4c6b-a1ea-079e61844032-config-data\") pod \"nova-scheduler-0\" (UID: 
\"56117495-4103-4c6b-a1ea-079e61844032\") " pod="openstack/nova-scheduler-0" Sep 30 10:09:38 crc kubenswrapper[4730]: I0930 10:09:38.336897 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-88w6d\" (UniqueName: \"kubernetes.io/projected/56117495-4103-4c6b-a1ea-079e61844032-kube-api-access-88w6d\") pod \"nova-scheduler-0\" (UID: \"56117495-4103-4c6b-a1ea-079e61844032\") " pod="openstack/nova-scheduler-0" Sep 30 10:09:38 crc kubenswrapper[4730]: I0930 10:09:38.346243 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 10:09:38 crc kubenswrapper[4730]: I0930 10:09:38.366295 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a5263576-d080-4153-b198-1eb7caaebf06-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"a5263576-d080-4153-b198-1eb7caaebf06\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 10:09:38 crc kubenswrapper[4730]: I0930 10:09:38.381346 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fnpvl\" (UniqueName: \"kubernetes.io/projected/a5263576-d080-4153-b198-1eb7caaebf06-kube-api-access-fnpvl\") pod \"nova-cell1-novncproxy-0\" (UID: \"a5263576-d080-4153-b198-1eb7caaebf06\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 10:09:38 crc kubenswrapper[4730]: I0930 10:09:38.382028 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a5263576-d080-4153-b198-1eb7caaebf06-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"a5263576-d080-4153-b198-1eb7caaebf06\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 10:09:38 crc kubenswrapper[4730]: I0930 10:09:38.413157 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Sep 30 10:09:38 crc kubenswrapper[4730]: I0930 10:09:38.437927 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f54f86f6-ab4f-4a8a-85e2-14af8e9cfe50-config-data\") pod \"nova-api-0\" (UID: \"f54f86f6-ab4f-4a8a-85e2-14af8e9cfe50\") " pod="openstack/nova-api-0" Sep 30 10:09:38 crc kubenswrapper[4730]: I0930 10:09:38.437996 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f54f86f6-ab4f-4a8a-85e2-14af8e9cfe50-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"f54f86f6-ab4f-4a8a-85e2-14af8e9cfe50\") " pod="openstack/nova-api-0" Sep 30 10:09:38 crc kubenswrapper[4730]: I0930 10:09:38.438014 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7ztll\" (UniqueName: \"kubernetes.io/projected/f54f86f6-ab4f-4a8a-85e2-14af8e9cfe50-kube-api-access-7ztll\") pod \"nova-api-0\" (UID: \"f54f86f6-ab4f-4a8a-85e2-14af8e9cfe50\") " pod="openstack/nova-api-0" Sep 30 10:09:38 crc kubenswrapper[4730]: I0930 10:09:38.438043 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/56117495-4103-4c6b-a1ea-079e61844032-config-data\") pod \"nova-scheduler-0\" (UID: \"56117495-4103-4c6b-a1ea-079e61844032\") " pod="openstack/nova-scheduler-0" Sep 30 10:09:38 crc kubenswrapper[4730]: I0930 10:09:38.438081 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-88w6d\" (UniqueName: 
\"kubernetes.io/projected/56117495-4103-4c6b-a1ea-079e61844032-kube-api-access-88w6d\") pod \"nova-scheduler-0\" (UID: \"56117495-4103-4c6b-a1ea-079e61844032\") " pod="openstack/nova-scheduler-0" Sep 30 10:09:38 crc kubenswrapper[4730]: I0930 10:09:38.438103 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f54f86f6-ab4f-4a8a-85e2-14af8e9cfe50-logs\") pod \"nova-api-0\" (UID: \"f54f86f6-ab4f-4a8a-85e2-14af8e9cfe50\") " pod="openstack/nova-api-0" Sep 30 10:09:38 crc kubenswrapper[4730]: I0930 10:09:38.438136 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/56117495-4103-4c6b-a1ea-079e61844032-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"56117495-4103-4c6b-a1ea-079e61844032\") " pod="openstack/nova-scheduler-0" Sep 30 10:09:38 crc kubenswrapper[4730]: I0930 10:09:38.444033 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f54f86f6-ab4f-4a8a-85e2-14af8e9cfe50-logs\") pod \"nova-api-0\" (UID: \"f54f86f6-ab4f-4a8a-85e2-14af8e9cfe50\") " pod="openstack/nova-api-0" Sep 30 10:09:38 crc kubenswrapper[4730]: I0930 10:09:38.448399 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f54f86f6-ab4f-4a8a-85e2-14af8e9cfe50-config-data\") pod \"nova-api-0\" (UID: \"f54f86f6-ab4f-4a8a-85e2-14af8e9cfe50\") " pod="openstack/nova-api-0" Sep 30 10:09:38 crc kubenswrapper[4730]: I0930 10:09:38.454625 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f54f86f6-ab4f-4a8a-85e2-14af8e9cfe50-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"f54f86f6-ab4f-4a8a-85e2-14af8e9cfe50\") " pod="openstack/nova-api-0" Sep 30 10:09:38 crc kubenswrapper[4730]: I0930 10:09:38.455326 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/56117495-4103-4c6b-a1ea-079e61844032-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"56117495-4103-4c6b-a1ea-079e61844032\") " pod="openstack/nova-scheduler-0" Sep 30 10:09:38 crc kubenswrapper[4730]: I0930 10:09:38.455478 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/56117495-4103-4c6b-a1ea-079e61844032-config-data\") pod \"nova-scheduler-0\" (UID: \"56117495-4103-4c6b-a1ea-079e61844032\") " pod="openstack/nova-scheduler-0" Sep 30 10:09:38 crc kubenswrapper[4730]: I0930 10:09:38.458219 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Sep 30 10:09:38 crc kubenswrapper[4730]: I0930 10:09:38.460400 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 30 10:09:38 crc kubenswrapper[4730]: I0930 10:09:38.475036 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Sep 30 10:09:38 crc kubenswrapper[4730]: I0930 10:09:38.477761 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 10:09:38 crc kubenswrapper[4730]: I0930 10:09:38.481703 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Sep 30 10:09:38 crc kubenswrapper[4730]: I0930 10:09:38.485529 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-88w6d\" (UniqueName: \"kubernetes.io/projected/56117495-4103-4c6b-a1ea-079e61844032-kube-api-access-88w6d\") pod \"nova-scheduler-0\" (UID: \"56117495-4103-4c6b-a1ea-079e61844032\") " pod="openstack/nova-scheduler-0" Sep 30 10:09:38 crc kubenswrapper[4730]: I0930 10:09:38.487244 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Sep 30 10:09:38 crc kubenswrapper[4730]: I0930 10:09:38.494100 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7ztll\" (UniqueName: \"kubernetes.io/projected/f54f86f6-ab4f-4a8a-85e2-14af8e9cfe50-kube-api-access-7ztll\") pod \"nova-api-0\" (UID: \"f54f86f6-ab4f-4a8a-85e2-14af8e9cfe50\") " pod="openstack/nova-api-0" Sep 30 10:09:38 crc kubenswrapper[4730]: I0930 10:09:38.511960 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6dfdd9cfdc-s9sqb"] Sep 30 10:09:38 crc kubenswrapper[4730]: I0930 10:09:38.514331 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6dfdd9cfdc-s9sqb" Sep 30 10:09:38 crc kubenswrapper[4730]: I0930 10:09:38.526141 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6dfdd9cfdc-s9sqb"] Sep 30 10:09:38 crc kubenswrapper[4730]: I0930 10:09:38.544359 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/81884103-bb2e-4e8d-b89c-ac82634026f3-logs\") pod \"nova-metadata-0\" (UID: \"81884103-bb2e-4e8d-b89c-ac82634026f3\") " pod="openstack/nova-metadata-0" Sep 30 10:09:38 crc kubenswrapper[4730]: I0930 10:09:38.544421 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/81884103-bb2e-4e8d-b89c-ac82634026f3-config-data\") pod \"nova-metadata-0\" (UID: \"81884103-bb2e-4e8d-b89c-ac82634026f3\") " pod="openstack/nova-metadata-0" Sep 30 10:09:38 crc kubenswrapper[4730]: I0930 10:09:38.544474 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/81884103-bb2e-4e8d-b89c-ac82634026f3-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"81884103-bb2e-4e8d-b89c-ac82634026f3\") " pod="openstack/nova-metadata-0" Sep 30 10:09:38 crc kubenswrapper[4730]: I0930 10:09:38.547976 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-678zz\" (UniqueName: \"kubernetes.io/projected/81884103-bb2e-4e8d-b89c-ac82634026f3-kube-api-access-678zz\") pod \"nova-metadata-0\" (UID: \"81884103-bb2e-4e8d-b89c-ac82634026f3\") " pod="openstack/nova-metadata-0" Sep 30 10:09:38 crc kubenswrapper[4730]: I0930 10:09:38.650320 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/81884103-bb2e-4e8d-b89c-ac82634026f3-config-data\") pod \"nova-metadata-0\" (UID: \"81884103-bb2e-4e8d-b89c-ac82634026f3\") " pod="openstack/nova-metadata-0" Sep 30 10:09:38 crc kubenswrapper[4730]: I0930 10:09:38.650709 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6qcwm\" (UniqueName: \"kubernetes.io/projected/64067ae9-7904-4133-9c1f-f09a64dd3209-kube-api-access-6qcwm\") pod \"dnsmasq-dns-6dfdd9cfdc-s9sqb\" (UID: \"64067ae9-7904-4133-9c1f-f09a64dd3209\") " pod="openstack/dnsmasq-dns-6dfdd9cfdc-s9sqb" Sep 30 10:09:38 crc kubenswrapper[4730]: I0930 10:09:38.650741 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/81884103-bb2e-4e8d-b89c-ac82634026f3-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"81884103-bb2e-4e8d-b89c-ac82634026f3\") " pod="openstack/nova-metadata-0" Sep 30 10:09:38 crc kubenswrapper[4730]: I0930 10:09:38.650815 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-678zz\" (UniqueName: \"kubernetes.io/projected/81884103-bb2e-4e8d-b89c-ac82634026f3-kube-api-access-678zz\") pod \"nova-metadata-0\" (UID: \"81884103-bb2e-4e8d-b89c-ac82634026f3\") " pod="openstack/nova-metadata-0" Sep 30 10:09:38 crc kubenswrapper[4730]: I0930 10:09:38.650864 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/64067ae9-7904-4133-9c1f-f09a64dd3209-config\") pod \"dnsmasq-dns-6dfdd9cfdc-s9sqb\" (UID: \"64067ae9-7904-4133-9c1f-f09a64dd3209\") " pod="openstack/dnsmasq-dns-6dfdd9cfdc-s9sqb" Sep 30 10:09:38 crc kubenswrapper[4730]: I0930 10:09:38.650892 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/64067ae9-7904-4133-9c1f-f09a64dd3209-dns-svc\") pod \"dnsmasq-dns-6dfdd9cfdc-s9sqb\" (UID: \"64067ae9-7904-4133-9c1f-f09a64dd3209\") " pod="openstack/dnsmasq-dns-6dfdd9cfdc-s9sqb" Sep 30 10:09:38 crc kubenswrapper[4730]: I0930 10:09:38.650924 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/64067ae9-7904-4133-9c1f-f09a64dd3209-ovsdbserver-nb\") pod \"dnsmasq-dns-6dfdd9cfdc-s9sqb\" (UID: \"64067ae9-7904-4133-9c1f-f09a64dd3209\") " pod="openstack/dnsmasq-dns-6dfdd9cfdc-s9sqb" Sep 30 10:09:38 crc kubenswrapper[4730]: I0930 10:09:38.651126 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/64067ae9-7904-4133-9c1f-f09a64dd3209-ovsdbserver-sb\") pod \"dnsmasq-dns-6dfdd9cfdc-s9sqb\" (UID: \"64067ae9-7904-4133-9c1f-f09a64dd3209\") " pod="openstack/dnsmasq-dns-6dfdd9cfdc-s9sqb" Sep 30 10:09:38 crc kubenswrapper[4730]: I0930 10:09:38.651499 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/81884103-bb2e-4e8d-b89c-ac82634026f3-logs\") pod \"nova-metadata-0\" (UID: \"81884103-bb2e-4e8d-b89c-ac82634026f3\") " pod="openstack/nova-metadata-0" Sep 30 10:09:38 crc kubenswrapper[4730]: I0930 10:09:38.652047 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/81884103-bb2e-4e8d-b89c-ac82634026f3-logs\") pod \"nova-metadata-0\" (UID: 
\"81884103-bb2e-4e8d-b89c-ac82634026f3\") " pod="openstack/nova-metadata-0" Sep 30 10:09:38 crc kubenswrapper[4730]: I0930 10:09:38.656255 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/81884103-bb2e-4e8d-b89c-ac82634026f3-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"81884103-bb2e-4e8d-b89c-ac82634026f3\") " pod="openstack/nova-metadata-0" Sep 30 10:09:38 crc kubenswrapper[4730]: I0930 10:09:38.664918 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/81884103-bb2e-4e8d-b89c-ac82634026f3-config-data\") pod \"nova-metadata-0\" (UID: \"81884103-bb2e-4e8d-b89c-ac82634026f3\") " pod="openstack/nova-metadata-0" Sep 30 10:09:38 crc kubenswrapper[4730]: I0930 10:09:38.671179 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-678zz\" (UniqueName: \"kubernetes.io/projected/81884103-bb2e-4e8d-b89c-ac82634026f3-kube-api-access-678zz\") pod \"nova-metadata-0\" (UID: \"81884103-bb2e-4e8d-b89c-ac82634026f3\") " pod="openstack/nova-metadata-0" Sep 30 10:09:38 crc kubenswrapper[4730]: I0930 10:09:38.749106 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Sep 30 10:09:38 crc kubenswrapper[4730]: I0930 10:09:38.753142 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6qcwm\" (UniqueName: \"kubernetes.io/projected/64067ae9-7904-4133-9c1f-f09a64dd3209-kube-api-access-6qcwm\") pod \"dnsmasq-dns-6dfdd9cfdc-s9sqb\" (UID: \"64067ae9-7904-4133-9c1f-f09a64dd3209\") " pod="openstack/dnsmasq-dns-6dfdd9cfdc-s9sqb" Sep 30 10:09:38 crc kubenswrapper[4730]: I0930 10:09:38.753240 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/64067ae9-7904-4133-9c1f-f09a64dd3209-config\") pod \"dnsmasq-dns-6dfdd9cfdc-s9sqb\" (UID: \"64067ae9-7904-4133-9c1f-f09a64dd3209\") " pod="openstack/dnsmasq-dns-6dfdd9cfdc-s9sqb" Sep 30 10:09:38 crc kubenswrapper[4730]: I0930 10:09:38.753259 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/64067ae9-7904-4133-9c1f-f09a64dd3209-dns-svc\") pod \"dnsmasq-dns-6dfdd9cfdc-s9sqb\" (UID: \"64067ae9-7904-4133-9c1f-f09a64dd3209\") " pod="openstack/dnsmasq-dns-6dfdd9cfdc-s9sqb" Sep 30 10:09:38 crc kubenswrapper[4730]: I0930 10:09:38.753279 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/64067ae9-7904-4133-9c1f-f09a64dd3209-ovsdbserver-nb\") pod \"dnsmasq-dns-6dfdd9cfdc-s9sqb\" (UID: \"64067ae9-7904-4133-9c1f-f09a64dd3209\") " pod="openstack/dnsmasq-dns-6dfdd9cfdc-s9sqb" Sep 30 10:09:38 crc kubenswrapper[4730]: I0930 10:09:38.753315 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/64067ae9-7904-4133-9c1f-f09a64dd3209-ovsdbserver-sb\") pod \"dnsmasq-dns-6dfdd9cfdc-s9sqb\" (UID: \"64067ae9-7904-4133-9c1f-f09a64dd3209\") " pod="openstack/dnsmasq-dns-6dfdd9cfdc-s9sqb" Sep 30 10:09:38 crc kubenswrapper[4730]: I0930 10:09:38.754978 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/64067ae9-7904-4133-9c1f-f09a64dd3209-config\") pod \"dnsmasq-dns-6dfdd9cfdc-s9sqb\" (UID: 
\"64067ae9-7904-4133-9c1f-f09a64dd3209\") " pod="openstack/dnsmasq-dns-6dfdd9cfdc-s9sqb" Sep 30 10:09:38 crc kubenswrapper[4730]: I0930 10:09:38.756117 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/64067ae9-7904-4133-9c1f-f09a64dd3209-dns-svc\") pod \"dnsmasq-dns-6dfdd9cfdc-s9sqb\" (UID: \"64067ae9-7904-4133-9c1f-f09a64dd3209\") " pod="openstack/dnsmasq-dns-6dfdd9cfdc-s9sqb" Sep 30 10:09:38 crc kubenswrapper[4730]: I0930 10:09:38.756534 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/64067ae9-7904-4133-9c1f-f09a64dd3209-ovsdbserver-nb\") pod \"dnsmasq-dns-6dfdd9cfdc-s9sqb\" (UID: \"64067ae9-7904-4133-9c1f-f09a64dd3209\") " pod="openstack/dnsmasq-dns-6dfdd9cfdc-s9sqb" Sep 30 10:09:38 crc kubenswrapper[4730]: I0930 10:09:38.759823 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/64067ae9-7904-4133-9c1f-f09a64dd3209-ovsdbserver-sb\") pod \"dnsmasq-dns-6dfdd9cfdc-s9sqb\" (UID: \"64067ae9-7904-4133-9c1f-f09a64dd3209\") " pod="openstack/dnsmasq-dns-6dfdd9cfdc-s9sqb" Sep 30 10:09:38 crc kubenswrapper[4730]: I0930 10:09:38.786858 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6qcwm\" (UniqueName: \"kubernetes.io/projected/64067ae9-7904-4133-9c1f-f09a64dd3209-kube-api-access-6qcwm\") pod \"dnsmasq-dns-6dfdd9cfdc-s9sqb\" (UID: \"64067ae9-7904-4133-9c1f-f09a64dd3209\") " pod="openstack/dnsmasq-dns-6dfdd9cfdc-s9sqb" Sep 30 10:09:38 crc kubenswrapper[4730]: I0930 10:09:38.818584 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 30 10:09:38 crc kubenswrapper[4730]: I0930 10:09:38.857121 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6dfdd9cfdc-s9sqb" Sep 30 10:09:39 crc kubenswrapper[4730]: I0930 10:09:39.066112 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-6psh5"] Sep 30 10:09:39 crc kubenswrapper[4730]: I0930 10:09:39.093563 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 30 10:09:39 crc kubenswrapper[4730]: I0930 10:09:39.108002 4730 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 30 10:09:39 crc kubenswrapper[4730]: I0930 10:09:39.368566 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 10:09:39 crc kubenswrapper[4730]: W0930 10:09:39.387313 4730 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod56117495_4103_4c6b_a1ea_079e61844032.slice/crio-4cc97f402399d32a228d6472b5eb07f91515197429017e2600c311d7c41b12e0 WatchSource:0}: Error finding container 4cc97f402399d32a228d6472b5eb07f91515197429017e2600c311d7c41b12e0: Status 404 returned error can't find the container with id 4cc97f402399d32a228d6472b5eb07f91515197429017e2600c311d7c41b12e0 Sep 30 10:09:39 crc kubenswrapper[4730]: I0930 10:09:39.407140 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-db-sync-hjjbx"] Sep 30 10:09:39 crc kubenswrapper[4730]: I0930 10:09:39.408765 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-hjjbx" Sep 30 10:09:39 crc kubenswrapper[4730]: I0930 10:09:39.413023 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-scripts" Sep 30 10:09:39 crc kubenswrapper[4730]: I0930 10:09:39.413532 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Sep 30 10:09:39 crc kubenswrapper[4730]: I0930 10:09:39.436467 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-hjjbx"] Sep 30 10:09:39 crc kubenswrapper[4730]: I0930 10:09:39.469946 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/043c0fe0-8c26-466a-bdde-0c6b9917e73f-config-data\") pod \"nova-cell1-conductor-db-sync-hjjbx\" (UID: \"043c0fe0-8c26-466a-bdde-0c6b9917e73f\") " pod="openstack/nova-cell1-conductor-db-sync-hjjbx" Sep 30 10:09:39 crc kubenswrapper[4730]: I0930 10:09:39.470049 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/043c0fe0-8c26-466a-bdde-0c6b9917e73f-scripts\") pod \"nova-cell1-conductor-db-sync-hjjbx\" (UID: \"043c0fe0-8c26-466a-bdde-0c6b9917e73f\") " pod="openstack/nova-cell1-conductor-db-sync-hjjbx" Sep 30 10:09:39 crc kubenswrapper[4730]: I0930 10:09:39.470071 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5cxhk\" (UniqueName: \"kubernetes.io/projected/043c0fe0-8c26-466a-bdde-0c6b9917e73f-kube-api-access-5cxhk\") pod \"nova-cell1-conductor-db-sync-hjjbx\" (UID: \"043c0fe0-8c26-466a-bdde-0c6b9917e73f\") " pod="openstack/nova-cell1-conductor-db-sync-hjjbx" Sep 30 10:09:39 crc kubenswrapper[4730]: I0930 10:09:39.470166 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/043c0fe0-8c26-466a-bdde-0c6b9917e73f-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-hjjbx\" (UID: \"043c0fe0-8c26-466a-bdde-0c6b9917e73f\") " pod="openstack/nova-cell1-conductor-db-sync-hjjbx" Sep 30 10:09:39 crc kubenswrapper[4730]: I0930 10:09:39.574076 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/043c0fe0-8c26-466a-bdde-0c6b9917e73f-config-data\") pod \"nova-cell1-conductor-db-sync-hjjbx\" (UID: \"043c0fe0-8c26-466a-bdde-0c6b9917e73f\") " pod="openstack/nova-cell1-conductor-db-sync-hjjbx" Sep 30 10:09:39 crc kubenswrapper[4730]: I0930 10:09:39.574813 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/043c0fe0-8c26-466a-bdde-0c6b9917e73f-scripts\") pod \"nova-cell1-conductor-db-sync-hjjbx\" (UID: \"043c0fe0-8c26-466a-bdde-0c6b9917e73f\") " pod="openstack/nova-cell1-conductor-db-sync-hjjbx" Sep 30 10:09:39 crc kubenswrapper[4730]: I0930 10:09:39.574846 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5cxhk\" (UniqueName: \"kubernetes.io/projected/043c0fe0-8c26-466a-bdde-0c6b9917e73f-kube-api-access-5cxhk\") pod \"nova-cell1-conductor-db-sync-hjjbx\" (UID: \"043c0fe0-8c26-466a-bdde-0c6b9917e73f\") " pod="openstack/nova-cell1-conductor-db-sync-hjjbx" Sep 30 10:09:39 crc kubenswrapper[4730]: I0930 10:09:39.575082 4730 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/043c0fe0-8c26-466a-bdde-0c6b9917e73f-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-hjjbx\" (UID: \"043c0fe0-8c26-466a-bdde-0c6b9917e73f\") " pod="openstack/nova-cell1-conductor-db-sync-hjjbx" Sep 30 10:09:39 crc kubenswrapper[4730]: I0930 10:09:39.595940 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/043c0fe0-8c26-466a-bdde-0c6b9917e73f-config-data\") pod \"nova-cell1-conductor-db-sync-hjjbx\" (UID: \"043c0fe0-8c26-466a-bdde-0c6b9917e73f\") " pod="openstack/nova-cell1-conductor-db-sync-hjjbx" Sep 30 10:09:39 crc kubenswrapper[4730]: I0930 10:09:39.596660 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/043c0fe0-8c26-466a-bdde-0c6b9917e73f-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-hjjbx\" (UID: \"043c0fe0-8c26-466a-bdde-0c6b9917e73f\") " pod="openstack/nova-cell1-conductor-db-sync-hjjbx" Sep 30 10:09:39 crc kubenswrapper[4730]: I0930 10:09:39.596968 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/043c0fe0-8c26-466a-bdde-0c6b9917e73f-scripts\") pod \"nova-cell1-conductor-db-sync-hjjbx\" (UID: \"043c0fe0-8c26-466a-bdde-0c6b9917e73f\") " pod="openstack/nova-cell1-conductor-db-sync-hjjbx" Sep 30 10:09:39 crc kubenswrapper[4730]: I0930 10:09:39.600291 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5cxhk\" (UniqueName: \"kubernetes.io/projected/043c0fe0-8c26-466a-bdde-0c6b9917e73f-kube-api-access-5cxhk\") pod \"nova-cell1-conductor-db-sync-hjjbx\" (UID: \"043c0fe0-8c26-466a-bdde-0c6b9917e73f\") " pod="openstack/nova-cell1-conductor-db-sync-hjjbx" Sep 30 10:09:39 crc kubenswrapper[4730]: I0930 10:09:39.650009 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6dfdd9cfdc-s9sqb"] Sep 30 10:09:39 crc kubenswrapper[4730]: I0930 10:09:39.666332 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Sep 30 10:09:39 crc kubenswrapper[4730]: W0930 10:09:39.677965 4730 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod64067ae9_7904_4133_9c1f_f09a64dd3209.slice/crio-83fac0ba688303ed39ea4bbd1dd8314a3deecf85c8a481e6289848d3cee1e9bc WatchSource:0}: Error finding container 83fac0ba688303ed39ea4bbd1dd8314a3deecf85c8a481e6289848d3cee1e9bc: Status 404 returned error can't find the container with id 83fac0ba688303ed39ea4bbd1dd8314a3deecf85c8a481e6289848d3cee1e9bc Sep 30 10:09:39 crc kubenswrapper[4730]: I0930 10:09:39.806220 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-hjjbx" Sep 30 10:09:39 crc kubenswrapper[4730]: I0930 10:09:39.808323 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 10:09:40 crc kubenswrapper[4730]: I0930 10:09:40.052654 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"f54f86f6-ab4f-4a8a-85e2-14af8e9cfe50","Type":"ContainerStarted","Data":"fae09b1246b52fa4dab771aec4ce712663d47094cd4912125bd23eae89bd6515"} Sep 30 10:09:40 crc kubenswrapper[4730]: I0930 10:09:40.054996 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"56117495-4103-4c6b-a1ea-079e61844032","Type":"ContainerStarted","Data":"4cc97f402399d32a228d6472b5eb07f91515197429017e2600c311d7c41b12e0"} Sep 30 10:09:40 crc kubenswrapper[4730]: I0930 10:09:40.059732 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-6psh5" event={"ID":"075c7eb9-74a6-49ca-a4b1-3fccba0ec354","Type":"ContainerStarted","Data":"0d11052bbd9c6a833feb43a9effc061106550825f8fcae90ac58eec1e239bc02"} Sep 30 10:09:40 crc kubenswrapper[4730]: I0930 10:09:40.059763 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-6psh5" event={"ID":"075c7eb9-74a6-49ca-a4b1-3fccba0ec354","Type":"ContainerStarted","Data":"d126ee4c6eaa70606fa586d20efea172e643db18a42d3bddf99e61e383e21246"} Sep 30 10:09:40 crc kubenswrapper[4730]: I0930 10:09:40.084847 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6dfdd9cfdc-s9sqb" event={"ID":"64067ae9-7904-4133-9c1f-f09a64dd3209","Type":"ContainerStarted","Data":"fb1693baff86423933117205eb4ddd3fb4c95696cd57ff870d3b2179798c4193"} Sep 30 10:09:40 crc kubenswrapper[4730]: I0930 10:09:40.084910 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6dfdd9cfdc-s9sqb" event={"ID":"64067ae9-7904-4133-9c1f-f09a64dd3209","Type":"ContainerStarted","Data":"83fac0ba688303ed39ea4bbd1dd8314a3deecf85c8a481e6289848d3cee1e9bc"} Sep 30 10:09:40 crc kubenswrapper[4730]: I0930 10:09:40.100330 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"a5263576-d080-4153-b198-1eb7caaebf06","Type":"ContainerStarted","Data":"af0e0e980b8abea3dd86624f791aa32dbc3a697a6b1bffba8921cc3520301979"} Sep 30 10:09:40 crc kubenswrapper[4730]: I0930 10:09:40.110910 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"81884103-bb2e-4e8d-b89c-ac82634026f3","Type":"ContainerStarted","Data":"59e49f243c045ab1e7baac9e31ec1b1139d6524def4e7136363730023906dff5"} Sep 30 10:09:40 crc kubenswrapper[4730]: I0930 10:09:40.117101 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-cell-mapping-6psh5" podStartSLOduration=3.116981606 podStartE2EDuration="3.116981606s" podCreationTimestamp="2025-09-30 10:09:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 10:09:40.081559429 +0000 UTC m=+1224.414819432" watchObservedRunningTime="2025-09-30 10:09:40.116981606 +0000 UTC m=+1224.450241599" Sep 30 10:09:40 crc kubenswrapper[4730]: I0930 10:09:40.409802 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-hjjbx"] Sep 30 10:09:41 crc kubenswrapper[4730]: I0930 10:09:41.128085 4730 generic.go:334] "Generic (PLEG): container 
finished" podID="64067ae9-7904-4133-9c1f-f09a64dd3209" containerID="fb1693baff86423933117205eb4ddd3fb4c95696cd57ff870d3b2179798c4193" exitCode=0 Sep 30 10:09:41 crc kubenswrapper[4730]: I0930 10:09:41.128154 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6dfdd9cfdc-s9sqb" event={"ID":"64067ae9-7904-4133-9c1f-f09a64dd3209","Type":"ContainerDied","Data":"fb1693baff86423933117205eb4ddd3fb4c95696cd57ff870d3b2179798c4193"} Sep 30 10:09:41 crc kubenswrapper[4730]: I0930 10:09:41.128181 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6dfdd9cfdc-s9sqb" event={"ID":"64067ae9-7904-4133-9c1f-f09a64dd3209","Type":"ContainerStarted","Data":"b369be31d719c6ab268dc519c85726e61f2d1a1f8e2769006f34e26002ae7c98"} Sep 30 10:09:41 crc kubenswrapper[4730]: I0930 10:09:41.128222 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6dfdd9cfdc-s9sqb" Sep 30 10:09:41 crc kubenswrapper[4730]: I0930 10:09:41.129425 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-hjjbx" event={"ID":"043c0fe0-8c26-466a-bdde-0c6b9917e73f","Type":"ContainerStarted","Data":"19f1e9633a53aefd3c05d094a219ecb00dc65589e62ff061c51265db1a6692b6"} Sep 30 10:09:41 crc kubenswrapper[4730]: I0930 10:09:41.155442 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6dfdd9cfdc-s9sqb" podStartSLOduration=3.155399506 podStartE2EDuration="3.155399506s" podCreationTimestamp="2025-09-30 10:09:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 10:09:41.144318066 +0000 UTC m=+1225.477578059" watchObservedRunningTime="2025-09-30 10:09:41.155399506 +0000 UTC m=+1225.488659519" Sep 30 10:09:41 crc kubenswrapper[4730]: I0930 10:09:41.181917 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Sep 30 10:09:42 crc kubenswrapper[4730]: I0930 10:09:42.377495 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 30 10:09:42 crc kubenswrapper[4730]: I0930 10:09:42.405188 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 10:09:44 crc kubenswrapper[4730]: I0930 10:09:44.166091 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"f54f86f6-ab4f-4a8a-85e2-14af8e9cfe50","Type":"ContainerStarted","Data":"cb029ee5d3ee981d10f0e1e5c74853b27578be3c194f15aa0f3e22e4e4efb1f2"} Sep 30 10:09:45 crc kubenswrapper[4730]: I0930 10:09:45.183908 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-hjjbx" event={"ID":"043c0fe0-8c26-466a-bdde-0c6b9917e73f","Type":"ContainerStarted","Data":"6bfbce02fb7811a3d6c7e52798c9abf02fea72ab6852cd388f91a6b2132ace1d"} Sep 30 10:09:45 crc kubenswrapper[4730]: I0930 10:09:45.187054 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"a5263576-d080-4153-b198-1eb7caaebf06","Type":"ContainerStarted","Data":"7d4edf7d3019983206e72a280d3e7d7fe0fa083783eb193ec4f96f7be87bc249"} Sep 30 10:09:45 crc kubenswrapper[4730]: I0930 10:09:45.187235 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="a5263576-d080-4153-b198-1eb7caaebf06" containerName="nova-cell1-novncproxy-novncproxy" 
containerID="cri-o://7d4edf7d3019983206e72a280d3e7d7fe0fa083783eb193ec4f96f7be87bc249" gracePeriod=30 Sep 30 10:09:45 crc kubenswrapper[4730]: I0930 10:09:45.192434 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"81884103-bb2e-4e8d-b89c-ac82634026f3","Type":"ContainerStarted","Data":"e67abcdc36984a2cb793b43a0301fdeb6f7c929ab314ec58b5fd9e65af428815"} Sep 30 10:09:45 crc kubenswrapper[4730]: I0930 10:09:45.192475 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"81884103-bb2e-4e8d-b89c-ac82634026f3","Type":"ContainerStarted","Data":"8469dc03da44804225cb92fa6e052d16c54d0072b90416a8b2763e5f02238bbb"} Sep 30 10:09:45 crc kubenswrapper[4730]: I0930 10:09:45.192578 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="81884103-bb2e-4e8d-b89c-ac82634026f3" containerName="nova-metadata-log" containerID="cri-o://8469dc03da44804225cb92fa6e052d16c54d0072b90416a8b2763e5f02238bbb" gracePeriod=30 Sep 30 10:09:45 crc kubenswrapper[4730]: I0930 10:09:45.192805 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="81884103-bb2e-4e8d-b89c-ac82634026f3" containerName="nova-metadata-metadata" containerID="cri-o://e67abcdc36984a2cb793b43a0301fdeb6f7c929ab314ec58b5fd9e65af428815" gracePeriod=30 Sep 30 10:09:45 crc kubenswrapper[4730]: I0930 10:09:45.200173 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"f54f86f6-ab4f-4a8a-85e2-14af8e9cfe50","Type":"ContainerStarted","Data":"9ad55bf011f4463153847bc72efd8540e09edbaa16aaee0b46f9f39aa520ec49"} Sep 30 10:09:45 crc kubenswrapper[4730]: I0930 10:09:45.207934 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-db-sync-hjjbx" podStartSLOduration=6.207911868 podStartE2EDuration="6.207911868s" podCreationTimestamp="2025-09-30 10:09:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 10:09:45.202541938 +0000 UTC m=+1229.535801941" watchObservedRunningTime="2025-09-30 10:09:45.207911868 +0000 UTC m=+1229.541171861" Sep 30 10:09:45 crc kubenswrapper[4730]: I0930 10:09:45.208270 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"56117495-4103-4c6b-a1ea-079e61844032","Type":"ContainerStarted","Data":"e23366eb93f323b3a6fd3e3b6b0a7472c29071f13be91b9aea87652e46739911"} Sep 30 10:09:45 crc kubenswrapper[4730]: I0930 10:09:45.235758 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=3.310342221 podStartE2EDuration="7.235739566s" podCreationTimestamp="2025-09-30 10:09:38 +0000 UTC" firstStartedPulling="2025-09-30 10:09:39.681113838 +0000 UTC m=+1224.014373831" lastFinishedPulling="2025-09-30 10:09:43.606511183 +0000 UTC m=+1227.939771176" observedRunningTime="2025-09-30 10:09:45.230003966 +0000 UTC m=+1229.563263949" watchObservedRunningTime="2025-09-30 10:09:45.235739566 +0000 UTC m=+1229.568999559" Sep 30 10:09:45 crc kubenswrapper[4730]: I0930 10:09:45.257435 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.7609332220000002 podStartE2EDuration="7.257415214s" podCreationTimestamp="2025-09-30 10:09:38 +0000 UTC" firstStartedPulling="2025-09-30 10:09:39.107779512 
+0000 UTC m=+1223.441039505" lastFinishedPulling="2025-09-30 10:09:43.604261504 +0000 UTC m=+1227.937521497" observedRunningTime="2025-09-30 10:09:45.253635775 +0000 UTC m=+1229.586895778" watchObservedRunningTime="2025-09-30 10:09:45.257415214 +0000 UTC m=+1229.590675207" Sep 30 10:09:45 crc kubenswrapper[4730]: I0930 10:09:45.282292 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=3.513506559 podStartE2EDuration="7.282271834s" podCreationTimestamp="2025-09-30 10:09:38 +0000 UTC" firstStartedPulling="2025-09-30 10:09:39.834969655 +0000 UTC m=+1224.168229648" lastFinishedPulling="2025-09-30 10:09:43.60373493 +0000 UTC m=+1227.936994923" observedRunningTime="2025-09-30 10:09:45.27292965 +0000 UTC m=+1229.606189653" watchObservedRunningTime="2025-09-30 10:09:45.282271834 +0000 UTC m=+1229.615531847" Sep 30 10:09:45 crc kubenswrapper[4730]: I0930 10:09:45.306547 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=3.097491571 podStartE2EDuration="7.306525529s" podCreationTimestamp="2025-09-30 10:09:38 +0000 UTC" firstStartedPulling="2025-09-30 10:09:39.389550257 +0000 UTC m=+1223.722810250" lastFinishedPulling="2025-09-30 10:09:43.598584215 +0000 UTC m=+1227.931844208" observedRunningTime="2025-09-30 10:09:45.299988308 +0000 UTC m=+1229.633248311" watchObservedRunningTime="2025-09-30 10:09:45.306525529 +0000 UTC m=+1229.639785522" Sep 30 10:09:45 crc kubenswrapper[4730]: I0930 10:09:45.797896 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 30 10:09:45 crc kubenswrapper[4730]: I0930 10:09:45.807286 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-678zz\" (UniqueName: \"kubernetes.io/projected/81884103-bb2e-4e8d-b89c-ac82634026f3-kube-api-access-678zz\") pod \"81884103-bb2e-4e8d-b89c-ac82634026f3\" (UID: \"81884103-bb2e-4e8d-b89c-ac82634026f3\") " Sep 30 10:09:45 crc kubenswrapper[4730]: I0930 10:09:45.807371 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/81884103-bb2e-4e8d-b89c-ac82634026f3-config-data\") pod \"81884103-bb2e-4e8d-b89c-ac82634026f3\" (UID: \"81884103-bb2e-4e8d-b89c-ac82634026f3\") " Sep 30 10:09:45 crc kubenswrapper[4730]: I0930 10:09:45.807442 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/81884103-bb2e-4e8d-b89c-ac82634026f3-logs\") pod \"81884103-bb2e-4e8d-b89c-ac82634026f3\" (UID: \"81884103-bb2e-4e8d-b89c-ac82634026f3\") " Sep 30 10:09:45 crc kubenswrapper[4730]: I0930 10:09:45.807495 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/81884103-bb2e-4e8d-b89c-ac82634026f3-combined-ca-bundle\") pod \"81884103-bb2e-4e8d-b89c-ac82634026f3\" (UID: \"81884103-bb2e-4e8d-b89c-ac82634026f3\") " Sep 30 10:09:45 crc kubenswrapper[4730]: I0930 10:09:45.808443 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/81884103-bb2e-4e8d-b89c-ac82634026f3-logs" (OuterVolumeSpecName: "logs") pod "81884103-bb2e-4e8d-b89c-ac82634026f3" (UID: "81884103-bb2e-4e8d-b89c-ac82634026f3"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 10:09:45 crc kubenswrapper[4730]: I0930 10:09:45.813245 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/81884103-bb2e-4e8d-b89c-ac82634026f3-kube-api-access-678zz" (OuterVolumeSpecName: "kube-api-access-678zz") pod "81884103-bb2e-4e8d-b89c-ac82634026f3" (UID: "81884103-bb2e-4e8d-b89c-ac82634026f3"). InnerVolumeSpecName "kube-api-access-678zz". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:09:45 crc kubenswrapper[4730]: I0930 10:09:45.849753 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/81884103-bb2e-4e8d-b89c-ac82634026f3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "81884103-bb2e-4e8d-b89c-ac82634026f3" (UID: "81884103-bb2e-4e8d-b89c-ac82634026f3"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:09:45 crc kubenswrapper[4730]: I0930 10:09:45.860649 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/81884103-bb2e-4e8d-b89c-ac82634026f3-config-data" (OuterVolumeSpecName: "config-data") pod "81884103-bb2e-4e8d-b89c-ac82634026f3" (UID: "81884103-bb2e-4e8d-b89c-ac82634026f3"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:09:45 crc kubenswrapper[4730]: I0930 10:09:45.909704 4730 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/81884103-bb2e-4e8d-b89c-ac82634026f3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 10:09:45 crc kubenswrapper[4730]: I0930 10:09:45.909747 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-678zz\" (UniqueName: \"kubernetes.io/projected/81884103-bb2e-4e8d-b89c-ac82634026f3-kube-api-access-678zz\") on node \"crc\" DevicePath \"\"" Sep 30 10:09:45 crc kubenswrapper[4730]: I0930 10:09:45.909762 4730 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/81884103-bb2e-4e8d-b89c-ac82634026f3-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 10:09:45 crc kubenswrapper[4730]: I0930 10:09:45.909774 4730 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/81884103-bb2e-4e8d-b89c-ac82634026f3-logs\") on node \"crc\" DevicePath \"\"" Sep 30 10:09:46 crc kubenswrapper[4730]: I0930 10:09:46.218444 4730 generic.go:334] "Generic (PLEG): container finished" podID="81884103-bb2e-4e8d-b89c-ac82634026f3" containerID="e67abcdc36984a2cb793b43a0301fdeb6f7c929ab314ec58b5fd9e65af428815" exitCode=0 Sep 30 10:09:46 crc kubenswrapper[4730]: I0930 10:09:46.218474 4730 generic.go:334] "Generic (PLEG): container finished" podID="81884103-bb2e-4e8d-b89c-ac82634026f3" containerID="8469dc03da44804225cb92fa6e052d16c54d0072b90416a8b2763e5f02238bbb" exitCode=143 Sep 30 10:09:46 crc kubenswrapper[4730]: I0930 10:09:46.218667 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"81884103-bb2e-4e8d-b89c-ac82634026f3","Type":"ContainerDied","Data":"e67abcdc36984a2cb793b43a0301fdeb6f7c929ab314ec58b5fd9e65af428815"} Sep 30 10:09:46 crc kubenswrapper[4730]: I0930 10:09:46.218729 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" 
event={"ID":"81884103-bb2e-4e8d-b89c-ac82634026f3","Type":"ContainerDied","Data":"8469dc03da44804225cb92fa6e052d16c54d0072b90416a8b2763e5f02238bbb"} Sep 30 10:09:46 crc kubenswrapper[4730]: I0930 10:09:46.218744 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"81884103-bb2e-4e8d-b89c-ac82634026f3","Type":"ContainerDied","Data":"59e49f243c045ab1e7baac9e31ec1b1139d6524def4e7136363730023906dff5"} Sep 30 10:09:46 crc kubenswrapper[4730]: I0930 10:09:46.218768 4730 scope.go:117] "RemoveContainer" containerID="e67abcdc36984a2cb793b43a0301fdeb6f7c929ab314ec58b5fd9e65af428815" Sep 30 10:09:46 crc kubenswrapper[4730]: I0930 10:09:46.219742 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 30 10:09:46 crc kubenswrapper[4730]: I0930 10:09:46.247115 4730 scope.go:117] "RemoveContainer" containerID="8469dc03da44804225cb92fa6e052d16c54d0072b90416a8b2763e5f02238bbb" Sep 30 10:09:46 crc kubenswrapper[4730]: I0930 10:09:46.251374 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 10:09:46 crc kubenswrapper[4730]: I0930 10:09:46.260917 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 10:09:46 crc kubenswrapper[4730]: I0930 10:09:46.269733 4730 scope.go:117] "RemoveContainer" containerID="e67abcdc36984a2cb793b43a0301fdeb6f7c929ab314ec58b5fd9e65af428815" Sep 30 10:09:46 crc kubenswrapper[4730]: E0930 10:09:46.270172 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e67abcdc36984a2cb793b43a0301fdeb6f7c929ab314ec58b5fd9e65af428815\": container with ID starting with e67abcdc36984a2cb793b43a0301fdeb6f7c929ab314ec58b5fd9e65af428815 not found: ID does not exist" containerID="e67abcdc36984a2cb793b43a0301fdeb6f7c929ab314ec58b5fd9e65af428815" Sep 30 10:09:46 crc kubenswrapper[4730]: I0930 10:09:46.270355 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e67abcdc36984a2cb793b43a0301fdeb6f7c929ab314ec58b5fd9e65af428815"} err="failed to get container status \"e67abcdc36984a2cb793b43a0301fdeb6f7c929ab314ec58b5fd9e65af428815\": rpc error: code = NotFound desc = could not find container \"e67abcdc36984a2cb793b43a0301fdeb6f7c929ab314ec58b5fd9e65af428815\": container with ID starting with e67abcdc36984a2cb793b43a0301fdeb6f7c929ab314ec58b5fd9e65af428815 not found: ID does not exist" Sep 30 10:09:46 crc kubenswrapper[4730]: I0930 10:09:46.270458 4730 scope.go:117] "RemoveContainer" containerID="8469dc03da44804225cb92fa6e052d16c54d0072b90416a8b2763e5f02238bbb" Sep 30 10:09:46 crc kubenswrapper[4730]: E0930 10:09:46.270837 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8469dc03da44804225cb92fa6e052d16c54d0072b90416a8b2763e5f02238bbb\": container with ID starting with 8469dc03da44804225cb92fa6e052d16c54d0072b90416a8b2763e5f02238bbb not found: ID does not exist" containerID="8469dc03da44804225cb92fa6e052d16c54d0072b90416a8b2763e5f02238bbb" Sep 30 10:09:46 crc kubenswrapper[4730]: I0930 10:09:46.270871 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8469dc03da44804225cb92fa6e052d16c54d0072b90416a8b2763e5f02238bbb"} err="failed to get container status \"8469dc03da44804225cb92fa6e052d16c54d0072b90416a8b2763e5f02238bbb\": rpc error: code = NotFound desc = could not find 
container \"8469dc03da44804225cb92fa6e052d16c54d0072b90416a8b2763e5f02238bbb\": container with ID starting with 8469dc03da44804225cb92fa6e052d16c54d0072b90416a8b2763e5f02238bbb not found: ID does not exist" Sep 30 10:09:46 crc kubenswrapper[4730]: I0930 10:09:46.270885 4730 scope.go:117] "RemoveContainer" containerID="e67abcdc36984a2cb793b43a0301fdeb6f7c929ab314ec58b5fd9e65af428815" Sep 30 10:09:46 crc kubenswrapper[4730]: I0930 10:09:46.271252 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e67abcdc36984a2cb793b43a0301fdeb6f7c929ab314ec58b5fd9e65af428815"} err="failed to get container status \"e67abcdc36984a2cb793b43a0301fdeb6f7c929ab314ec58b5fd9e65af428815\": rpc error: code = NotFound desc = could not find container \"e67abcdc36984a2cb793b43a0301fdeb6f7c929ab314ec58b5fd9e65af428815\": container with ID starting with e67abcdc36984a2cb793b43a0301fdeb6f7c929ab314ec58b5fd9e65af428815 not found: ID does not exist" Sep 30 10:09:46 crc kubenswrapper[4730]: I0930 10:09:46.271348 4730 scope.go:117] "RemoveContainer" containerID="8469dc03da44804225cb92fa6e052d16c54d0072b90416a8b2763e5f02238bbb" Sep 30 10:09:46 crc kubenswrapper[4730]: I0930 10:09:46.271653 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8469dc03da44804225cb92fa6e052d16c54d0072b90416a8b2763e5f02238bbb"} err="failed to get container status \"8469dc03da44804225cb92fa6e052d16c54d0072b90416a8b2763e5f02238bbb\": rpc error: code = NotFound desc = could not find container \"8469dc03da44804225cb92fa6e052d16c54d0072b90416a8b2763e5f02238bbb\": container with ID starting with 8469dc03da44804225cb92fa6e052d16c54d0072b90416a8b2763e5f02238bbb not found: ID does not exist" Sep 30 10:09:46 crc kubenswrapper[4730]: I0930 10:09:46.284095 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Sep 30 10:09:46 crc kubenswrapper[4730]: E0930 10:09:46.284599 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="81884103-bb2e-4e8d-b89c-ac82634026f3" containerName="nova-metadata-log" Sep 30 10:09:46 crc kubenswrapper[4730]: I0930 10:09:46.284640 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="81884103-bb2e-4e8d-b89c-ac82634026f3" containerName="nova-metadata-log" Sep 30 10:09:46 crc kubenswrapper[4730]: E0930 10:09:46.284663 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="81884103-bb2e-4e8d-b89c-ac82634026f3" containerName="nova-metadata-metadata" Sep 30 10:09:46 crc kubenswrapper[4730]: I0930 10:09:46.284679 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="81884103-bb2e-4e8d-b89c-ac82634026f3" containerName="nova-metadata-metadata" Sep 30 10:09:46 crc kubenswrapper[4730]: I0930 10:09:46.284886 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="81884103-bb2e-4e8d-b89c-ac82634026f3" containerName="nova-metadata-metadata" Sep 30 10:09:46 crc kubenswrapper[4730]: I0930 10:09:46.284909 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="81884103-bb2e-4e8d-b89c-ac82634026f3" containerName="nova-metadata-log" Sep 30 10:09:46 crc kubenswrapper[4730]: I0930 10:09:46.286326 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Sep 30 10:09:46 crc kubenswrapper[4730]: I0930 10:09:46.294458 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Sep 30 10:09:46 crc kubenswrapper[4730]: I0930 10:09:46.297844 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Sep 30 10:09:46 crc kubenswrapper[4730]: I0930 10:09:46.313928 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 10:09:46 crc kubenswrapper[4730]: I0930 10:09:46.316956 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wr5nt\" (UniqueName: \"kubernetes.io/projected/f13a23ba-f2b3-4b5b-ae08-adcb32ed8c14-kube-api-access-wr5nt\") pod \"nova-metadata-0\" (UID: \"f13a23ba-f2b3-4b5b-ae08-adcb32ed8c14\") " pod="openstack/nova-metadata-0" Sep 30 10:09:46 crc kubenswrapper[4730]: I0930 10:09:46.317001 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f13a23ba-f2b3-4b5b-ae08-adcb32ed8c14-config-data\") pod \"nova-metadata-0\" (UID: \"f13a23ba-f2b3-4b5b-ae08-adcb32ed8c14\") " pod="openstack/nova-metadata-0" Sep 30 10:09:46 crc kubenswrapper[4730]: I0930 10:09:46.317055 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/f13a23ba-f2b3-4b5b-ae08-adcb32ed8c14-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"f13a23ba-f2b3-4b5b-ae08-adcb32ed8c14\") " pod="openstack/nova-metadata-0" Sep 30 10:09:46 crc kubenswrapper[4730]: I0930 10:09:46.317221 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f13a23ba-f2b3-4b5b-ae08-adcb32ed8c14-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"f13a23ba-f2b3-4b5b-ae08-adcb32ed8c14\") " pod="openstack/nova-metadata-0" Sep 30 10:09:46 crc kubenswrapper[4730]: I0930 10:09:46.317327 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f13a23ba-f2b3-4b5b-ae08-adcb32ed8c14-logs\") pod \"nova-metadata-0\" (UID: \"f13a23ba-f2b3-4b5b-ae08-adcb32ed8c14\") " pod="openstack/nova-metadata-0" Sep 30 10:09:46 crc kubenswrapper[4730]: I0930 10:09:46.394171 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="81884103-bb2e-4e8d-b89c-ac82634026f3" path="/var/lib/kubelet/pods/81884103-bb2e-4e8d-b89c-ac82634026f3/volumes" Sep 30 10:09:46 crc kubenswrapper[4730]: I0930 10:09:46.418943 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f13a23ba-f2b3-4b5b-ae08-adcb32ed8c14-logs\") pod \"nova-metadata-0\" (UID: \"f13a23ba-f2b3-4b5b-ae08-adcb32ed8c14\") " pod="openstack/nova-metadata-0" Sep 30 10:09:46 crc kubenswrapper[4730]: I0930 10:09:46.419010 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wr5nt\" (UniqueName: \"kubernetes.io/projected/f13a23ba-f2b3-4b5b-ae08-adcb32ed8c14-kube-api-access-wr5nt\") pod \"nova-metadata-0\" (UID: \"f13a23ba-f2b3-4b5b-ae08-adcb32ed8c14\") " pod="openstack/nova-metadata-0" Sep 30 10:09:46 crc kubenswrapper[4730]: I0930 10:09:46.419028 4730 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f13a23ba-f2b3-4b5b-ae08-adcb32ed8c14-config-data\") pod \"nova-metadata-0\" (UID: \"f13a23ba-f2b3-4b5b-ae08-adcb32ed8c14\") " pod="openstack/nova-metadata-0" Sep 30 10:09:46 crc kubenswrapper[4730]: I0930 10:09:46.419063 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/f13a23ba-f2b3-4b5b-ae08-adcb32ed8c14-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"f13a23ba-f2b3-4b5b-ae08-adcb32ed8c14\") " pod="openstack/nova-metadata-0" Sep 30 10:09:46 crc kubenswrapper[4730]: I0930 10:09:46.419132 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f13a23ba-f2b3-4b5b-ae08-adcb32ed8c14-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"f13a23ba-f2b3-4b5b-ae08-adcb32ed8c14\") " pod="openstack/nova-metadata-0" Sep 30 10:09:46 crc kubenswrapper[4730]: I0930 10:09:46.419997 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f13a23ba-f2b3-4b5b-ae08-adcb32ed8c14-logs\") pod \"nova-metadata-0\" (UID: \"f13a23ba-f2b3-4b5b-ae08-adcb32ed8c14\") " pod="openstack/nova-metadata-0" Sep 30 10:09:46 crc kubenswrapper[4730]: I0930 10:09:46.425106 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/f13a23ba-f2b3-4b5b-ae08-adcb32ed8c14-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"f13a23ba-f2b3-4b5b-ae08-adcb32ed8c14\") " pod="openstack/nova-metadata-0" Sep 30 10:09:46 crc kubenswrapper[4730]: I0930 10:09:46.425411 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f13a23ba-f2b3-4b5b-ae08-adcb32ed8c14-config-data\") pod \"nova-metadata-0\" (UID: \"f13a23ba-f2b3-4b5b-ae08-adcb32ed8c14\") " pod="openstack/nova-metadata-0" Sep 30 10:09:46 crc kubenswrapper[4730]: I0930 10:09:46.436105 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f13a23ba-f2b3-4b5b-ae08-adcb32ed8c14-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"f13a23ba-f2b3-4b5b-ae08-adcb32ed8c14\") " pod="openstack/nova-metadata-0" Sep 30 10:09:46 crc kubenswrapper[4730]: I0930 10:09:46.443921 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wr5nt\" (UniqueName: \"kubernetes.io/projected/f13a23ba-f2b3-4b5b-ae08-adcb32ed8c14-kube-api-access-wr5nt\") pod \"nova-metadata-0\" (UID: \"f13a23ba-f2b3-4b5b-ae08-adcb32ed8c14\") " pod="openstack/nova-metadata-0" Sep 30 10:09:46 crc kubenswrapper[4730]: I0930 10:09:46.606405 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Sep 30 10:09:47 crc kubenswrapper[4730]: I0930 10:09:47.063720 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 10:09:47 crc kubenswrapper[4730]: I0930 10:09:47.230475 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"f13a23ba-f2b3-4b5b-ae08-adcb32ed8c14","Type":"ContainerStarted","Data":"623c4916e741ed999fa7f1252b70fe9b0a7f818b4743db316a93970076c64151"} Sep 30 10:09:48 crc kubenswrapper[4730]: I0930 10:09:48.240896 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"f13a23ba-f2b3-4b5b-ae08-adcb32ed8c14","Type":"ContainerStarted","Data":"e06d03a119f1fdc62a10ca904286ba4b75ba3e7b9cacf32c1655f36af3c0d7a2"} Sep 30 10:09:48 crc kubenswrapper[4730]: I0930 10:09:48.241422 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"f13a23ba-f2b3-4b5b-ae08-adcb32ed8c14","Type":"ContainerStarted","Data":"c7bcc4987b7fe865b9b83d8762fc0c7f8a6b28e8bb9f5a45d189b31839ddedd8"} Sep 30 10:09:48 crc kubenswrapper[4730]: I0930 10:09:48.261356 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.261338539 podStartE2EDuration="2.261338539s" podCreationTimestamp="2025-09-30 10:09:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 10:09:48.259393019 +0000 UTC m=+1232.592653012" watchObservedRunningTime="2025-09-30 10:09:48.261338539 +0000 UTC m=+1232.594598532" Sep 30 10:09:48 crc kubenswrapper[4730]: I0930 10:09:48.490523 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Sep 30 10:09:48 crc kubenswrapper[4730]: I0930 10:09:48.490602 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Sep 30 10:09:48 crc kubenswrapper[4730]: I0930 10:09:48.490655 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Sep 30 10:09:48 crc kubenswrapper[4730]: I0930 10:09:48.519296 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Sep 30 10:09:48 crc kubenswrapper[4730]: I0930 10:09:48.750688 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Sep 30 10:09:48 crc kubenswrapper[4730]: I0930 10:09:48.750794 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Sep 30 10:09:48 crc kubenswrapper[4730]: I0930 10:09:48.859902 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6dfdd9cfdc-s9sqb" Sep 30 10:09:48 crc kubenswrapper[4730]: I0930 10:09:48.926942 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c65c577dc-tzdmg"] Sep 30 10:09:48 crc kubenswrapper[4730]: I0930 10:09:48.927215 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5c65c577dc-tzdmg" podUID="47203112-feec-49c6-a377-145258876393" containerName="dnsmasq-dns" containerID="cri-o://c8d23dcdc699d9f08de7751fd49ce06b7faf82a17e559868455cf1297fd73a1e" gracePeriod=10 Sep 30 10:09:49 crc kubenswrapper[4730]: I0930 10:09:49.259466 4730 generic.go:334] "Generic (PLEG): container finished" podID="47203112-feec-49c6-a377-145258876393" 
containerID="c8d23dcdc699d9f08de7751fd49ce06b7faf82a17e559868455cf1297fd73a1e" exitCode=0 Sep 30 10:09:49 crc kubenswrapper[4730]: I0930 10:09:49.259838 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c65c577dc-tzdmg" event={"ID":"47203112-feec-49c6-a377-145258876393","Type":"ContainerDied","Data":"c8d23dcdc699d9f08de7751fd49ce06b7faf82a17e559868455cf1297fd73a1e"} Sep 30 10:09:49 crc kubenswrapper[4730]: I0930 10:09:49.266857 4730 generic.go:334] "Generic (PLEG): container finished" podID="075c7eb9-74a6-49ca-a4b1-3fccba0ec354" containerID="0d11052bbd9c6a833feb43a9effc061106550825f8fcae90ac58eec1e239bc02" exitCode=0 Sep 30 10:09:49 crc kubenswrapper[4730]: I0930 10:09:49.266885 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-6psh5" event={"ID":"075c7eb9-74a6-49ca-a4b1-3fccba0ec354","Type":"ContainerDied","Data":"0d11052bbd9c6a833feb43a9effc061106550825f8fcae90ac58eec1e239bc02"} Sep 30 10:09:49 crc kubenswrapper[4730]: I0930 10:09:49.316817 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Sep 30 10:09:49 crc kubenswrapper[4730]: I0930 10:09:49.462072 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c65c577dc-tzdmg" Sep 30 10:09:49 crc kubenswrapper[4730]: I0930 10:09:49.583249 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/47203112-feec-49c6-a377-145258876393-dns-svc\") pod \"47203112-feec-49c6-a377-145258876393\" (UID: \"47203112-feec-49c6-a377-145258876393\") " Sep 30 10:09:49 crc kubenswrapper[4730]: I0930 10:09:49.583363 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gncfx\" (UniqueName: \"kubernetes.io/projected/47203112-feec-49c6-a377-145258876393-kube-api-access-gncfx\") pod \"47203112-feec-49c6-a377-145258876393\" (UID: \"47203112-feec-49c6-a377-145258876393\") " Sep 30 10:09:49 crc kubenswrapper[4730]: I0930 10:09:49.583456 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/47203112-feec-49c6-a377-145258876393-ovsdbserver-sb\") pod \"47203112-feec-49c6-a377-145258876393\" (UID: \"47203112-feec-49c6-a377-145258876393\") " Sep 30 10:09:49 crc kubenswrapper[4730]: I0930 10:09:49.583504 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/47203112-feec-49c6-a377-145258876393-config\") pod \"47203112-feec-49c6-a377-145258876393\" (UID: \"47203112-feec-49c6-a377-145258876393\") " Sep 30 10:09:49 crc kubenswrapper[4730]: I0930 10:09:49.583571 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/47203112-feec-49c6-a377-145258876393-ovsdbserver-nb\") pod \"47203112-feec-49c6-a377-145258876393\" (UID: \"47203112-feec-49c6-a377-145258876393\") " Sep 30 10:09:49 crc kubenswrapper[4730]: I0930 10:09:49.602854 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/47203112-feec-49c6-a377-145258876393-kube-api-access-gncfx" (OuterVolumeSpecName: "kube-api-access-gncfx") pod "47203112-feec-49c6-a377-145258876393" (UID: "47203112-feec-49c6-a377-145258876393"). InnerVolumeSpecName "kube-api-access-gncfx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:09:49 crc kubenswrapper[4730]: I0930 10:09:49.638904 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/47203112-feec-49c6-a377-145258876393-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "47203112-feec-49c6-a377-145258876393" (UID: "47203112-feec-49c6-a377-145258876393"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 10:09:49 crc kubenswrapper[4730]: I0930 10:09:49.643874 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/47203112-feec-49c6-a377-145258876393-config" (OuterVolumeSpecName: "config") pod "47203112-feec-49c6-a377-145258876393" (UID: "47203112-feec-49c6-a377-145258876393"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 10:09:49 crc kubenswrapper[4730]: I0930 10:09:49.648476 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/47203112-feec-49c6-a377-145258876393-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "47203112-feec-49c6-a377-145258876393" (UID: "47203112-feec-49c6-a377-145258876393"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 10:09:49 crc kubenswrapper[4730]: I0930 10:09:49.667265 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/47203112-feec-49c6-a377-145258876393-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "47203112-feec-49c6-a377-145258876393" (UID: "47203112-feec-49c6-a377-145258876393"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 10:09:49 crc kubenswrapper[4730]: I0930 10:09:49.687631 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gncfx\" (UniqueName: \"kubernetes.io/projected/47203112-feec-49c6-a377-145258876393-kube-api-access-gncfx\") on node \"crc\" DevicePath \"\"" Sep 30 10:09:49 crc kubenswrapper[4730]: I0930 10:09:49.687671 4730 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/47203112-feec-49c6-a377-145258876393-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 30 10:09:49 crc kubenswrapper[4730]: I0930 10:09:49.687684 4730 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/47203112-feec-49c6-a377-145258876393-config\") on node \"crc\" DevicePath \"\"" Sep 30 10:09:49 crc kubenswrapper[4730]: I0930 10:09:49.687697 4730 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/47203112-feec-49c6-a377-145258876393-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 30 10:09:49 crc kubenswrapper[4730]: I0930 10:09:49.687708 4730 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/47203112-feec-49c6-a377-145258876393-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 10:09:49 crc kubenswrapper[4730]: I0930 10:09:49.832815 4730 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="f54f86f6-ab4f-4a8a-85e2-14af8e9cfe50" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.189:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Sep 30 10:09:49 crc kubenswrapper[4730]: I0930 10:09:49.832815 4730 prober.go:107] "Probe 
failed" probeType="Startup" pod="openstack/nova-api-0" podUID="f54f86f6-ab4f-4a8a-85e2-14af8e9cfe50" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.189:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Sep 30 10:09:50 crc kubenswrapper[4730]: I0930 10:09:50.296134 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c65c577dc-tzdmg" Sep 30 10:09:50 crc kubenswrapper[4730]: I0930 10:09:50.296791 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c65c577dc-tzdmg" event={"ID":"47203112-feec-49c6-a377-145258876393","Type":"ContainerDied","Data":"8224e587aafa27e2dce4e0fc0c1fd13467263849a015778394b42ed6033ed682"} Sep 30 10:09:50 crc kubenswrapper[4730]: I0930 10:09:50.296831 4730 scope.go:117] "RemoveContainer" containerID="c8d23dcdc699d9f08de7751fd49ce06b7faf82a17e559868455cf1297fd73a1e" Sep 30 10:09:50 crc kubenswrapper[4730]: I0930 10:09:50.331566 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c65c577dc-tzdmg"] Sep 30 10:09:50 crc kubenswrapper[4730]: I0930 10:09:50.336984 4730 scope.go:117] "RemoveContainer" containerID="44f32d504f1491302cc30c6ff9346c69ceca8868628da7b3c9488db8e47d1b35" Sep 30 10:09:50 crc kubenswrapper[4730]: I0930 10:09:50.344585 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5c65c577dc-tzdmg"] Sep 30 10:09:50 crc kubenswrapper[4730]: I0930 10:09:50.396252 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="47203112-feec-49c6-a377-145258876393" path="/var/lib/kubelet/pods/47203112-feec-49c6-a377-145258876393/volumes" Sep 30 10:09:50 crc kubenswrapper[4730]: I0930 10:09:50.746042 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-6psh5" Sep 30 10:09:50 crc kubenswrapper[4730]: I0930 10:09:50.806172 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/075c7eb9-74a6-49ca-a4b1-3fccba0ec354-combined-ca-bundle\") pod \"075c7eb9-74a6-49ca-a4b1-3fccba0ec354\" (UID: \"075c7eb9-74a6-49ca-a4b1-3fccba0ec354\") " Sep 30 10:09:50 crc kubenswrapper[4730]: I0930 10:09:50.806286 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-94s6x\" (UniqueName: \"kubernetes.io/projected/075c7eb9-74a6-49ca-a4b1-3fccba0ec354-kube-api-access-94s6x\") pod \"075c7eb9-74a6-49ca-a4b1-3fccba0ec354\" (UID: \"075c7eb9-74a6-49ca-a4b1-3fccba0ec354\") " Sep 30 10:09:50 crc kubenswrapper[4730]: I0930 10:09:50.806333 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/075c7eb9-74a6-49ca-a4b1-3fccba0ec354-scripts\") pod \"075c7eb9-74a6-49ca-a4b1-3fccba0ec354\" (UID: \"075c7eb9-74a6-49ca-a4b1-3fccba0ec354\") " Sep 30 10:09:50 crc kubenswrapper[4730]: I0930 10:09:50.806362 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/075c7eb9-74a6-49ca-a4b1-3fccba0ec354-config-data\") pod \"075c7eb9-74a6-49ca-a4b1-3fccba0ec354\" (UID: \"075c7eb9-74a6-49ca-a4b1-3fccba0ec354\") " Sep 30 10:09:50 crc kubenswrapper[4730]: I0930 10:09:50.835861 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/075c7eb9-74a6-49ca-a4b1-3fccba0ec354-scripts" (OuterVolumeSpecName: "scripts") pod "075c7eb9-74a6-49ca-a4b1-3fccba0ec354" (UID: "075c7eb9-74a6-49ca-a4b1-3fccba0ec354"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:09:50 crc kubenswrapper[4730]: I0930 10:09:50.857313 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/075c7eb9-74a6-49ca-a4b1-3fccba0ec354-kube-api-access-94s6x" (OuterVolumeSpecName: "kube-api-access-94s6x") pod "075c7eb9-74a6-49ca-a4b1-3fccba0ec354" (UID: "075c7eb9-74a6-49ca-a4b1-3fccba0ec354"). InnerVolumeSpecName "kube-api-access-94s6x". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:09:50 crc kubenswrapper[4730]: I0930 10:09:50.866730 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/075c7eb9-74a6-49ca-a4b1-3fccba0ec354-config-data" (OuterVolumeSpecName: "config-data") pod "075c7eb9-74a6-49ca-a4b1-3fccba0ec354" (UID: "075c7eb9-74a6-49ca-a4b1-3fccba0ec354"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:09:50 crc kubenswrapper[4730]: I0930 10:09:50.919801 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-94s6x\" (UniqueName: \"kubernetes.io/projected/075c7eb9-74a6-49ca-a4b1-3fccba0ec354-kube-api-access-94s6x\") on node \"crc\" DevicePath \"\"" Sep 30 10:09:50 crc kubenswrapper[4730]: I0930 10:09:50.920042 4730 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/075c7eb9-74a6-49ca-a4b1-3fccba0ec354-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 10:09:50 crc kubenswrapper[4730]: I0930 10:09:50.920052 4730 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/075c7eb9-74a6-49ca-a4b1-3fccba0ec354-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 10:09:50 crc kubenswrapper[4730]: I0930 10:09:50.933776 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/075c7eb9-74a6-49ca-a4b1-3fccba0ec354-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "075c7eb9-74a6-49ca-a4b1-3fccba0ec354" (UID: "075c7eb9-74a6-49ca-a4b1-3fccba0ec354"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:09:51 crc kubenswrapper[4730]: I0930 10:09:51.021570 4730 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/075c7eb9-74a6-49ca-a4b1-3fccba0ec354-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 10:09:51 crc kubenswrapper[4730]: I0930 10:09:51.310906 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-6psh5" event={"ID":"075c7eb9-74a6-49ca-a4b1-3fccba0ec354","Type":"ContainerDied","Data":"d126ee4c6eaa70606fa586d20efea172e643db18a42d3bddf99e61e383e21246"} Sep 30 10:09:51 crc kubenswrapper[4730]: I0930 10:09:51.310942 4730 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d126ee4c6eaa70606fa586d20efea172e643db18a42d3bddf99e61e383e21246" Sep 30 10:09:51 crc kubenswrapper[4730]: I0930 10:09:51.311785 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-6psh5" Sep 30 10:09:51 crc kubenswrapper[4730]: I0930 10:09:51.457797 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Sep 30 10:09:51 crc kubenswrapper[4730]: I0930 10:09:51.458059 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="f54f86f6-ab4f-4a8a-85e2-14af8e9cfe50" containerName="nova-api-log" containerID="cri-o://cb029ee5d3ee981d10f0e1e5c74853b27578be3c194f15aa0f3e22e4e4efb1f2" gracePeriod=30 Sep 30 10:09:51 crc kubenswrapper[4730]: I0930 10:09:51.458174 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="f54f86f6-ab4f-4a8a-85e2-14af8e9cfe50" containerName="nova-api-api" containerID="cri-o://9ad55bf011f4463153847bc72efd8540e09edbaa16aaee0b46f9f39aa520ec49" gracePeriod=30 Sep 30 10:09:51 crc kubenswrapper[4730]: I0930 10:09:51.476043 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 10:09:51 crc kubenswrapper[4730]: I0930 10:09:51.476250 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="56117495-4103-4c6b-a1ea-079e61844032" containerName="nova-scheduler-scheduler" containerID="cri-o://e23366eb93f323b3a6fd3e3b6b0a7472c29071f13be91b9aea87652e46739911" gracePeriod=30 Sep 30 10:09:51 crc kubenswrapper[4730]: I0930 10:09:51.488895 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 10:09:51 crc kubenswrapper[4730]: I0930 10:09:51.489328 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="f13a23ba-f2b3-4b5b-ae08-adcb32ed8c14" containerName="nova-metadata-log" containerID="cri-o://c7bcc4987b7fe865b9b83d8762fc0c7f8a6b28e8bb9f5a45d189b31839ddedd8" gracePeriod=30 Sep 30 10:09:51 crc kubenswrapper[4730]: I0930 10:09:51.489979 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="f13a23ba-f2b3-4b5b-ae08-adcb32ed8c14" containerName="nova-metadata-metadata" containerID="cri-o://e06d03a119f1fdc62a10ca904286ba4b75ba3e7b9cacf32c1655f36af3c0d7a2" gracePeriod=30 Sep 30 10:09:51 crc kubenswrapper[4730]: I0930 10:09:51.607503 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Sep 30 10:09:51 crc kubenswrapper[4730]: I0930 10:09:51.607792 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Sep 30 10:09:52 crc kubenswrapper[4730]: I0930 10:09:52.040221 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Sep 30 10:09:52 crc kubenswrapper[4730]: I0930 10:09:52.140237 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f13a23ba-f2b3-4b5b-ae08-adcb32ed8c14-combined-ca-bundle\") pod \"f13a23ba-f2b3-4b5b-ae08-adcb32ed8c14\" (UID: \"f13a23ba-f2b3-4b5b-ae08-adcb32ed8c14\") " Sep 30 10:09:52 crc kubenswrapper[4730]: I0930 10:09:52.140692 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f13a23ba-f2b3-4b5b-ae08-adcb32ed8c14-logs\") pod \"f13a23ba-f2b3-4b5b-ae08-adcb32ed8c14\" (UID: \"f13a23ba-f2b3-4b5b-ae08-adcb32ed8c14\") " Sep 30 10:09:52 crc kubenswrapper[4730]: I0930 10:09:52.140813 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wr5nt\" (UniqueName: \"kubernetes.io/projected/f13a23ba-f2b3-4b5b-ae08-adcb32ed8c14-kube-api-access-wr5nt\") pod \"f13a23ba-f2b3-4b5b-ae08-adcb32ed8c14\" (UID: \"f13a23ba-f2b3-4b5b-ae08-adcb32ed8c14\") " Sep 30 10:09:52 crc kubenswrapper[4730]: I0930 10:09:52.141046 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f13a23ba-f2b3-4b5b-ae08-adcb32ed8c14-logs" (OuterVolumeSpecName: "logs") pod "f13a23ba-f2b3-4b5b-ae08-adcb32ed8c14" (UID: "f13a23ba-f2b3-4b5b-ae08-adcb32ed8c14"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 10:09:52 crc kubenswrapper[4730]: I0930 10:09:52.141484 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/f13a23ba-f2b3-4b5b-ae08-adcb32ed8c14-nova-metadata-tls-certs\") pod \"f13a23ba-f2b3-4b5b-ae08-adcb32ed8c14\" (UID: \"f13a23ba-f2b3-4b5b-ae08-adcb32ed8c14\") " Sep 30 10:09:52 crc kubenswrapper[4730]: I0930 10:09:52.141742 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f13a23ba-f2b3-4b5b-ae08-adcb32ed8c14-config-data\") pod \"f13a23ba-f2b3-4b5b-ae08-adcb32ed8c14\" (UID: \"f13a23ba-f2b3-4b5b-ae08-adcb32ed8c14\") " Sep 30 10:09:52 crc kubenswrapper[4730]: I0930 10:09:52.142987 4730 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f13a23ba-f2b3-4b5b-ae08-adcb32ed8c14-logs\") on node \"crc\" DevicePath \"\"" Sep 30 10:09:52 crc kubenswrapper[4730]: I0930 10:09:52.148961 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f13a23ba-f2b3-4b5b-ae08-adcb32ed8c14-kube-api-access-wr5nt" (OuterVolumeSpecName: "kube-api-access-wr5nt") pod "f13a23ba-f2b3-4b5b-ae08-adcb32ed8c14" (UID: "f13a23ba-f2b3-4b5b-ae08-adcb32ed8c14"). InnerVolumeSpecName "kube-api-access-wr5nt". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:09:52 crc kubenswrapper[4730]: I0930 10:09:52.168491 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f13a23ba-f2b3-4b5b-ae08-adcb32ed8c14-config-data" (OuterVolumeSpecName: "config-data") pod "f13a23ba-f2b3-4b5b-ae08-adcb32ed8c14" (UID: "f13a23ba-f2b3-4b5b-ae08-adcb32ed8c14"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:09:52 crc kubenswrapper[4730]: I0930 10:09:52.200354 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f13a23ba-f2b3-4b5b-ae08-adcb32ed8c14-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f13a23ba-f2b3-4b5b-ae08-adcb32ed8c14" (UID: "f13a23ba-f2b3-4b5b-ae08-adcb32ed8c14"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:09:52 crc kubenswrapper[4730]: I0930 10:09:52.202515 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f13a23ba-f2b3-4b5b-ae08-adcb32ed8c14-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "f13a23ba-f2b3-4b5b-ae08-adcb32ed8c14" (UID: "f13a23ba-f2b3-4b5b-ae08-adcb32ed8c14"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:09:52 crc kubenswrapper[4730]: I0930 10:09:52.245133 4730 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/f13a23ba-f2b3-4b5b-ae08-adcb32ed8c14-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 10:09:52 crc kubenswrapper[4730]: I0930 10:09:52.245174 4730 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f13a23ba-f2b3-4b5b-ae08-adcb32ed8c14-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 10:09:52 crc kubenswrapper[4730]: I0930 10:09:52.245188 4730 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f13a23ba-f2b3-4b5b-ae08-adcb32ed8c14-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 10:09:52 crc kubenswrapper[4730]: I0930 10:09:52.245201 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wr5nt\" (UniqueName: \"kubernetes.io/projected/f13a23ba-f2b3-4b5b-ae08-adcb32ed8c14-kube-api-access-wr5nt\") on node \"crc\" DevicePath \"\"" Sep 30 10:09:52 crc kubenswrapper[4730]: I0930 10:09:52.321788 4730 generic.go:334] "Generic (PLEG): container finished" podID="f54f86f6-ab4f-4a8a-85e2-14af8e9cfe50" containerID="cb029ee5d3ee981d10f0e1e5c74853b27578be3c194f15aa0f3e22e4e4efb1f2" exitCode=143 Sep 30 10:09:52 crc kubenswrapper[4730]: I0930 10:09:52.321945 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"f54f86f6-ab4f-4a8a-85e2-14af8e9cfe50","Type":"ContainerDied","Data":"cb029ee5d3ee981d10f0e1e5c74853b27578be3c194f15aa0f3e22e4e4efb1f2"} Sep 30 10:09:52 crc kubenswrapper[4730]: I0930 10:09:52.328743 4730 generic.go:334] "Generic (PLEG): container finished" podID="f13a23ba-f2b3-4b5b-ae08-adcb32ed8c14" containerID="e06d03a119f1fdc62a10ca904286ba4b75ba3e7b9cacf32c1655f36af3c0d7a2" exitCode=0 Sep 30 10:09:52 crc kubenswrapper[4730]: I0930 10:09:52.328780 4730 generic.go:334] "Generic (PLEG): container finished" podID="f13a23ba-f2b3-4b5b-ae08-adcb32ed8c14" containerID="c7bcc4987b7fe865b9b83d8762fc0c7f8a6b28e8bb9f5a45d189b31839ddedd8" exitCode=143 Sep 30 10:09:52 crc kubenswrapper[4730]: I0930 10:09:52.328805 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"f13a23ba-f2b3-4b5b-ae08-adcb32ed8c14","Type":"ContainerDied","Data":"e06d03a119f1fdc62a10ca904286ba4b75ba3e7b9cacf32c1655f36af3c0d7a2"} Sep 30 10:09:52 crc kubenswrapper[4730]: I0930 10:09:52.328834 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/nova-metadata-0" event={"ID":"f13a23ba-f2b3-4b5b-ae08-adcb32ed8c14","Type":"ContainerDied","Data":"c7bcc4987b7fe865b9b83d8762fc0c7f8a6b28e8bb9f5a45d189b31839ddedd8"} Sep 30 10:09:52 crc kubenswrapper[4730]: I0930 10:09:52.328846 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"f13a23ba-f2b3-4b5b-ae08-adcb32ed8c14","Type":"ContainerDied","Data":"623c4916e741ed999fa7f1252b70fe9b0a7f818b4743db316a93970076c64151"} Sep 30 10:09:52 crc kubenswrapper[4730]: I0930 10:09:52.328862 4730 scope.go:117] "RemoveContainer" containerID="e06d03a119f1fdc62a10ca904286ba4b75ba3e7b9cacf32c1655f36af3c0d7a2" Sep 30 10:09:52 crc kubenswrapper[4730]: I0930 10:09:52.328979 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 30 10:09:52 crc kubenswrapper[4730]: I0930 10:09:52.361783 4730 scope.go:117] "RemoveContainer" containerID="c7bcc4987b7fe865b9b83d8762fc0c7f8a6b28e8bb9f5a45d189b31839ddedd8" Sep 30 10:09:52 crc kubenswrapper[4730]: I0930 10:09:52.379184 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 10:09:52 crc kubenswrapper[4730]: I0930 10:09:52.384034 4730 scope.go:117] "RemoveContainer" containerID="e06d03a119f1fdc62a10ca904286ba4b75ba3e7b9cacf32c1655f36af3c0d7a2" Sep 30 10:09:52 crc kubenswrapper[4730]: E0930 10:09:52.384476 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e06d03a119f1fdc62a10ca904286ba4b75ba3e7b9cacf32c1655f36af3c0d7a2\": container with ID starting with e06d03a119f1fdc62a10ca904286ba4b75ba3e7b9cacf32c1655f36af3c0d7a2 not found: ID does not exist" containerID="e06d03a119f1fdc62a10ca904286ba4b75ba3e7b9cacf32c1655f36af3c0d7a2" Sep 30 10:09:52 crc kubenswrapper[4730]: I0930 10:09:52.384509 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e06d03a119f1fdc62a10ca904286ba4b75ba3e7b9cacf32c1655f36af3c0d7a2"} err="failed to get container status \"e06d03a119f1fdc62a10ca904286ba4b75ba3e7b9cacf32c1655f36af3c0d7a2\": rpc error: code = NotFound desc = could not find container \"e06d03a119f1fdc62a10ca904286ba4b75ba3e7b9cacf32c1655f36af3c0d7a2\": container with ID starting with e06d03a119f1fdc62a10ca904286ba4b75ba3e7b9cacf32c1655f36af3c0d7a2 not found: ID does not exist" Sep 30 10:09:52 crc kubenswrapper[4730]: I0930 10:09:52.384534 4730 scope.go:117] "RemoveContainer" containerID="c7bcc4987b7fe865b9b83d8762fc0c7f8a6b28e8bb9f5a45d189b31839ddedd8" Sep 30 10:09:52 crc kubenswrapper[4730]: E0930 10:09:52.384938 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c7bcc4987b7fe865b9b83d8762fc0c7f8a6b28e8bb9f5a45d189b31839ddedd8\": container with ID starting with c7bcc4987b7fe865b9b83d8762fc0c7f8a6b28e8bb9f5a45d189b31839ddedd8 not found: ID does not exist" containerID="c7bcc4987b7fe865b9b83d8762fc0c7f8a6b28e8bb9f5a45d189b31839ddedd8" Sep 30 10:09:52 crc kubenswrapper[4730]: I0930 10:09:52.384967 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c7bcc4987b7fe865b9b83d8762fc0c7f8a6b28e8bb9f5a45d189b31839ddedd8"} err="failed to get container status \"c7bcc4987b7fe865b9b83d8762fc0c7f8a6b28e8bb9f5a45d189b31839ddedd8\": rpc error: code = NotFound desc = could not find container \"c7bcc4987b7fe865b9b83d8762fc0c7f8a6b28e8bb9f5a45d189b31839ddedd8\": container with ID starting with 
c7bcc4987b7fe865b9b83d8762fc0c7f8a6b28e8bb9f5a45d189b31839ddedd8 not found: ID does not exist" Sep 30 10:09:52 crc kubenswrapper[4730]: I0930 10:09:52.384989 4730 scope.go:117] "RemoveContainer" containerID="e06d03a119f1fdc62a10ca904286ba4b75ba3e7b9cacf32c1655f36af3c0d7a2" Sep 30 10:09:52 crc kubenswrapper[4730]: I0930 10:09:52.385272 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e06d03a119f1fdc62a10ca904286ba4b75ba3e7b9cacf32c1655f36af3c0d7a2"} err="failed to get container status \"e06d03a119f1fdc62a10ca904286ba4b75ba3e7b9cacf32c1655f36af3c0d7a2\": rpc error: code = NotFound desc = could not find container \"e06d03a119f1fdc62a10ca904286ba4b75ba3e7b9cacf32c1655f36af3c0d7a2\": container with ID starting with e06d03a119f1fdc62a10ca904286ba4b75ba3e7b9cacf32c1655f36af3c0d7a2 not found: ID does not exist" Sep 30 10:09:52 crc kubenswrapper[4730]: I0930 10:09:52.385295 4730 scope.go:117] "RemoveContainer" containerID="c7bcc4987b7fe865b9b83d8762fc0c7f8a6b28e8bb9f5a45d189b31839ddedd8" Sep 30 10:09:52 crc kubenswrapper[4730]: I0930 10:09:52.386962 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c7bcc4987b7fe865b9b83d8762fc0c7f8a6b28e8bb9f5a45d189b31839ddedd8"} err="failed to get container status \"c7bcc4987b7fe865b9b83d8762fc0c7f8a6b28e8bb9f5a45d189b31839ddedd8\": rpc error: code = NotFound desc = could not find container \"c7bcc4987b7fe865b9b83d8762fc0c7f8a6b28e8bb9f5a45d189b31839ddedd8\": container with ID starting with c7bcc4987b7fe865b9b83d8762fc0c7f8a6b28e8bb9f5a45d189b31839ddedd8 not found: ID does not exist" Sep 30 10:09:52 crc kubenswrapper[4730]: I0930 10:09:52.411972 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 10:09:52 crc kubenswrapper[4730]: I0930 10:09:52.413892 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Sep 30 10:09:52 crc kubenswrapper[4730]: E0930 10:09:52.414331 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="47203112-feec-49c6-a377-145258876393" containerName="init" Sep 30 10:09:52 crc kubenswrapper[4730]: I0930 10:09:52.414355 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="47203112-feec-49c6-a377-145258876393" containerName="init" Sep 30 10:09:52 crc kubenswrapper[4730]: E0930 10:09:52.414380 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="075c7eb9-74a6-49ca-a4b1-3fccba0ec354" containerName="nova-manage" Sep 30 10:09:52 crc kubenswrapper[4730]: I0930 10:09:52.414389 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="075c7eb9-74a6-49ca-a4b1-3fccba0ec354" containerName="nova-manage" Sep 30 10:09:52 crc kubenswrapper[4730]: E0930 10:09:52.414417 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="47203112-feec-49c6-a377-145258876393" containerName="dnsmasq-dns" Sep 30 10:09:52 crc kubenswrapper[4730]: I0930 10:09:52.414425 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="47203112-feec-49c6-a377-145258876393" containerName="dnsmasq-dns" Sep 30 10:09:52 crc kubenswrapper[4730]: E0930 10:09:52.414439 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f13a23ba-f2b3-4b5b-ae08-adcb32ed8c14" containerName="nova-metadata-metadata" Sep 30 10:09:52 crc kubenswrapper[4730]: I0930 10:09:52.414446 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="f13a23ba-f2b3-4b5b-ae08-adcb32ed8c14" containerName="nova-metadata-metadata" Sep 30 10:09:52 crc kubenswrapper[4730]: E0930 
10:09:52.414459 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f13a23ba-f2b3-4b5b-ae08-adcb32ed8c14" containerName="nova-metadata-log" Sep 30 10:09:52 crc kubenswrapper[4730]: I0930 10:09:52.414466 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="f13a23ba-f2b3-4b5b-ae08-adcb32ed8c14" containerName="nova-metadata-log" Sep 30 10:09:52 crc kubenswrapper[4730]: I0930 10:09:52.422163 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="f13a23ba-f2b3-4b5b-ae08-adcb32ed8c14" containerName="nova-metadata-metadata" Sep 30 10:09:52 crc kubenswrapper[4730]: I0930 10:09:52.422216 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="075c7eb9-74a6-49ca-a4b1-3fccba0ec354" containerName="nova-manage" Sep 30 10:09:52 crc kubenswrapper[4730]: I0930 10:09:52.422250 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="f13a23ba-f2b3-4b5b-ae08-adcb32ed8c14" containerName="nova-metadata-log" Sep 30 10:09:52 crc kubenswrapper[4730]: I0930 10:09:52.422262 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="47203112-feec-49c6-a377-145258876393" containerName="dnsmasq-dns" Sep 30 10:09:52 crc kubenswrapper[4730]: I0930 10:09:52.423721 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 30 10:09:52 crc kubenswrapper[4730]: I0930 10:09:52.426539 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 10:09:52 crc kubenswrapper[4730]: I0930 10:09:52.427084 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Sep 30 10:09:52 crc kubenswrapper[4730]: I0930 10:09:52.427353 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Sep 30 10:09:52 crc kubenswrapper[4730]: I0930 10:09:52.550056 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2mxrn\" (UniqueName: \"kubernetes.io/projected/21add49c-8259-4090-88ac-34b1b97149b7-kube-api-access-2mxrn\") pod \"nova-metadata-0\" (UID: \"21add49c-8259-4090-88ac-34b1b97149b7\") " pod="openstack/nova-metadata-0" Sep 30 10:09:52 crc kubenswrapper[4730]: I0930 10:09:52.550112 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/21add49c-8259-4090-88ac-34b1b97149b7-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"21add49c-8259-4090-88ac-34b1b97149b7\") " pod="openstack/nova-metadata-0" Sep 30 10:09:52 crc kubenswrapper[4730]: I0930 10:09:52.550191 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/21add49c-8259-4090-88ac-34b1b97149b7-config-data\") pod \"nova-metadata-0\" (UID: \"21add49c-8259-4090-88ac-34b1b97149b7\") " pod="openstack/nova-metadata-0" Sep 30 10:09:52 crc kubenswrapper[4730]: I0930 10:09:52.550247 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21add49c-8259-4090-88ac-34b1b97149b7-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"21add49c-8259-4090-88ac-34b1b97149b7\") " pod="openstack/nova-metadata-0" Sep 30 10:09:52 crc kubenswrapper[4730]: I0930 10:09:52.550278 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"logs\" (UniqueName: \"kubernetes.io/empty-dir/21add49c-8259-4090-88ac-34b1b97149b7-logs\") pod \"nova-metadata-0\" (UID: \"21add49c-8259-4090-88ac-34b1b97149b7\") " pod="openstack/nova-metadata-0" Sep 30 10:09:52 crc kubenswrapper[4730]: I0930 10:09:52.652041 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2mxrn\" (UniqueName: \"kubernetes.io/projected/21add49c-8259-4090-88ac-34b1b97149b7-kube-api-access-2mxrn\") pod \"nova-metadata-0\" (UID: \"21add49c-8259-4090-88ac-34b1b97149b7\") " pod="openstack/nova-metadata-0" Sep 30 10:09:52 crc kubenswrapper[4730]: I0930 10:09:52.652427 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/21add49c-8259-4090-88ac-34b1b97149b7-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"21add49c-8259-4090-88ac-34b1b97149b7\") " pod="openstack/nova-metadata-0" Sep 30 10:09:52 crc kubenswrapper[4730]: I0930 10:09:52.652528 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/21add49c-8259-4090-88ac-34b1b97149b7-config-data\") pod \"nova-metadata-0\" (UID: \"21add49c-8259-4090-88ac-34b1b97149b7\") " pod="openstack/nova-metadata-0" Sep 30 10:09:52 crc kubenswrapper[4730]: I0930 10:09:52.652629 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21add49c-8259-4090-88ac-34b1b97149b7-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"21add49c-8259-4090-88ac-34b1b97149b7\") " pod="openstack/nova-metadata-0" Sep 30 10:09:52 crc kubenswrapper[4730]: I0930 10:09:52.652678 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/21add49c-8259-4090-88ac-34b1b97149b7-logs\") pod \"nova-metadata-0\" (UID: \"21add49c-8259-4090-88ac-34b1b97149b7\") " pod="openstack/nova-metadata-0" Sep 30 10:09:52 crc kubenswrapper[4730]: I0930 10:09:52.653318 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/21add49c-8259-4090-88ac-34b1b97149b7-logs\") pod \"nova-metadata-0\" (UID: \"21add49c-8259-4090-88ac-34b1b97149b7\") " pod="openstack/nova-metadata-0" Sep 30 10:09:52 crc kubenswrapper[4730]: I0930 10:09:52.666272 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21add49c-8259-4090-88ac-34b1b97149b7-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"21add49c-8259-4090-88ac-34b1b97149b7\") " pod="openstack/nova-metadata-0" Sep 30 10:09:52 crc kubenswrapper[4730]: I0930 10:09:52.668502 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/21add49c-8259-4090-88ac-34b1b97149b7-config-data\") pod \"nova-metadata-0\" (UID: \"21add49c-8259-4090-88ac-34b1b97149b7\") " pod="openstack/nova-metadata-0" Sep 30 10:09:52 crc kubenswrapper[4730]: I0930 10:09:52.671795 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/21add49c-8259-4090-88ac-34b1b97149b7-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"21add49c-8259-4090-88ac-34b1b97149b7\") " pod="openstack/nova-metadata-0" Sep 30 10:09:52 crc kubenswrapper[4730]: I0930 10:09:52.672994 4730 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"kube-api-access-2mxrn\" (UniqueName: \"kubernetes.io/projected/21add49c-8259-4090-88ac-34b1b97149b7-kube-api-access-2mxrn\") pod \"nova-metadata-0\" (UID: \"21add49c-8259-4090-88ac-34b1b97149b7\") " pod="openstack/nova-metadata-0" Sep 30 10:09:52 crc kubenswrapper[4730]: I0930 10:09:52.794026 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Sep 30 10:09:52 crc kubenswrapper[4730]: I0930 10:09:52.838362 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 30 10:09:52 crc kubenswrapper[4730]: I0930 10:09:52.855837 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-88w6d\" (UniqueName: \"kubernetes.io/projected/56117495-4103-4c6b-a1ea-079e61844032-kube-api-access-88w6d\") pod \"56117495-4103-4c6b-a1ea-079e61844032\" (UID: \"56117495-4103-4c6b-a1ea-079e61844032\") " Sep 30 10:09:52 crc kubenswrapper[4730]: I0930 10:09:52.855918 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/56117495-4103-4c6b-a1ea-079e61844032-combined-ca-bundle\") pod \"56117495-4103-4c6b-a1ea-079e61844032\" (UID: \"56117495-4103-4c6b-a1ea-079e61844032\") " Sep 30 10:09:52 crc kubenswrapper[4730]: I0930 10:09:52.855946 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/56117495-4103-4c6b-a1ea-079e61844032-config-data\") pod \"56117495-4103-4c6b-a1ea-079e61844032\" (UID: \"56117495-4103-4c6b-a1ea-079e61844032\") " Sep 30 10:09:52 crc kubenswrapper[4730]: I0930 10:09:52.859309 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/56117495-4103-4c6b-a1ea-079e61844032-kube-api-access-88w6d" (OuterVolumeSpecName: "kube-api-access-88w6d") pod "56117495-4103-4c6b-a1ea-079e61844032" (UID: "56117495-4103-4c6b-a1ea-079e61844032"). InnerVolumeSpecName "kube-api-access-88w6d". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:09:52 crc kubenswrapper[4730]: I0930 10:09:52.882950 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/56117495-4103-4c6b-a1ea-079e61844032-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "56117495-4103-4c6b-a1ea-079e61844032" (UID: "56117495-4103-4c6b-a1ea-079e61844032"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:09:52 crc kubenswrapper[4730]: I0930 10:09:52.886911 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/56117495-4103-4c6b-a1ea-079e61844032-config-data" (OuterVolumeSpecName: "config-data") pod "56117495-4103-4c6b-a1ea-079e61844032" (UID: "56117495-4103-4c6b-a1ea-079e61844032"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:09:52 crc kubenswrapper[4730]: I0930 10:09:52.957435 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-88w6d\" (UniqueName: \"kubernetes.io/projected/56117495-4103-4c6b-a1ea-079e61844032-kube-api-access-88w6d\") on node \"crc\" DevicePath \"\"" Sep 30 10:09:52 crc kubenswrapper[4730]: I0930 10:09:52.957704 4730 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/56117495-4103-4c6b-a1ea-079e61844032-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 10:09:52 crc kubenswrapper[4730]: I0930 10:09:52.957715 4730 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/56117495-4103-4c6b-a1ea-079e61844032-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 10:09:53 crc kubenswrapper[4730]: I0930 10:09:53.360390 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 10:09:53 crc kubenswrapper[4730]: I0930 10:09:53.395581 4730 generic.go:334] "Generic (PLEG): container finished" podID="56117495-4103-4c6b-a1ea-079e61844032" containerID="e23366eb93f323b3a6fd3e3b6b0a7472c29071f13be91b9aea87652e46739911" exitCode=0 Sep 30 10:09:53 crc kubenswrapper[4730]: I0930 10:09:53.395642 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"56117495-4103-4c6b-a1ea-079e61844032","Type":"ContainerDied","Data":"e23366eb93f323b3a6fd3e3b6b0a7472c29071f13be91b9aea87652e46739911"} Sep 30 10:09:53 crc kubenswrapper[4730]: I0930 10:09:53.395677 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"56117495-4103-4c6b-a1ea-079e61844032","Type":"ContainerDied","Data":"4cc97f402399d32a228d6472b5eb07f91515197429017e2600c311d7c41b12e0"} Sep 30 10:09:53 crc kubenswrapper[4730]: I0930 10:09:53.395677 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Sep 30 10:09:53 crc kubenswrapper[4730]: I0930 10:09:53.395884 4730 scope.go:117] "RemoveContainer" containerID="e23366eb93f323b3a6fd3e3b6b0a7472c29071f13be91b9aea87652e46739911" Sep 30 10:09:53 crc kubenswrapper[4730]: I0930 10:09:53.434646 4730 scope.go:117] "RemoveContainer" containerID="e23366eb93f323b3a6fd3e3b6b0a7472c29071f13be91b9aea87652e46739911" Sep 30 10:09:53 crc kubenswrapper[4730]: E0930 10:09:53.439029 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e23366eb93f323b3a6fd3e3b6b0a7472c29071f13be91b9aea87652e46739911\": container with ID starting with e23366eb93f323b3a6fd3e3b6b0a7472c29071f13be91b9aea87652e46739911 not found: ID does not exist" containerID="e23366eb93f323b3a6fd3e3b6b0a7472c29071f13be91b9aea87652e46739911" Sep 30 10:09:53 crc kubenswrapper[4730]: I0930 10:09:53.439089 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e23366eb93f323b3a6fd3e3b6b0a7472c29071f13be91b9aea87652e46739911"} err="failed to get container status \"e23366eb93f323b3a6fd3e3b6b0a7472c29071f13be91b9aea87652e46739911\": rpc error: code = NotFound desc = could not find container \"e23366eb93f323b3a6fd3e3b6b0a7472c29071f13be91b9aea87652e46739911\": container with ID starting with e23366eb93f323b3a6fd3e3b6b0a7472c29071f13be91b9aea87652e46739911 not found: ID does not exist" Sep 30 10:09:53 crc kubenswrapper[4730]: I0930 10:09:53.441960 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 10:09:53 crc kubenswrapper[4730]: I0930 10:09:53.469800 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 10:09:53 crc kubenswrapper[4730]: I0930 10:09:53.486047 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 10:09:53 crc kubenswrapper[4730]: E0930 10:09:53.486466 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="56117495-4103-4c6b-a1ea-079e61844032" containerName="nova-scheduler-scheduler" Sep 30 10:09:53 crc kubenswrapper[4730]: I0930 10:09:53.486484 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="56117495-4103-4c6b-a1ea-079e61844032" containerName="nova-scheduler-scheduler" Sep 30 10:09:53 crc kubenswrapper[4730]: I0930 10:09:53.486784 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="56117495-4103-4c6b-a1ea-079e61844032" containerName="nova-scheduler-scheduler" Sep 30 10:09:53 crc kubenswrapper[4730]: I0930 10:09:53.487459 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Sep 30 10:09:53 crc kubenswrapper[4730]: I0930 10:09:53.489976 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Sep 30 10:09:53 crc kubenswrapper[4730]: I0930 10:09:53.494090 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 10:09:53 crc kubenswrapper[4730]: I0930 10:09:53.568212 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8dd0a8ec-fce4-4b9c-8a10-e4ebf910c583-config-data\") pod \"nova-scheduler-0\" (UID: \"8dd0a8ec-fce4-4b9c-8a10-e4ebf910c583\") " pod="openstack/nova-scheduler-0" Sep 30 10:09:53 crc kubenswrapper[4730]: I0930 10:09:53.568326 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m2t2w\" (UniqueName: \"kubernetes.io/projected/8dd0a8ec-fce4-4b9c-8a10-e4ebf910c583-kube-api-access-m2t2w\") pod \"nova-scheduler-0\" (UID: \"8dd0a8ec-fce4-4b9c-8a10-e4ebf910c583\") " pod="openstack/nova-scheduler-0" Sep 30 10:09:53 crc kubenswrapper[4730]: I0930 10:09:53.568419 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8dd0a8ec-fce4-4b9c-8a10-e4ebf910c583-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"8dd0a8ec-fce4-4b9c-8a10-e4ebf910c583\") " pod="openstack/nova-scheduler-0" Sep 30 10:09:53 crc kubenswrapper[4730]: I0930 10:09:53.670125 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8dd0a8ec-fce4-4b9c-8a10-e4ebf910c583-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"8dd0a8ec-fce4-4b9c-8a10-e4ebf910c583\") " pod="openstack/nova-scheduler-0" Sep 30 10:09:53 crc kubenswrapper[4730]: I0930 10:09:53.670594 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8dd0a8ec-fce4-4b9c-8a10-e4ebf910c583-config-data\") pod \"nova-scheduler-0\" (UID: \"8dd0a8ec-fce4-4b9c-8a10-e4ebf910c583\") " pod="openstack/nova-scheduler-0" Sep 30 10:09:53 crc kubenswrapper[4730]: I0930 10:09:53.670711 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m2t2w\" (UniqueName: \"kubernetes.io/projected/8dd0a8ec-fce4-4b9c-8a10-e4ebf910c583-kube-api-access-m2t2w\") pod \"nova-scheduler-0\" (UID: \"8dd0a8ec-fce4-4b9c-8a10-e4ebf910c583\") " pod="openstack/nova-scheduler-0" Sep 30 10:09:53 crc kubenswrapper[4730]: I0930 10:09:53.674165 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8dd0a8ec-fce4-4b9c-8a10-e4ebf910c583-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"8dd0a8ec-fce4-4b9c-8a10-e4ebf910c583\") " pod="openstack/nova-scheduler-0" Sep 30 10:09:53 crc kubenswrapper[4730]: I0930 10:09:53.675273 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8dd0a8ec-fce4-4b9c-8a10-e4ebf910c583-config-data\") pod \"nova-scheduler-0\" (UID: \"8dd0a8ec-fce4-4b9c-8a10-e4ebf910c583\") " pod="openstack/nova-scheduler-0" Sep 30 10:09:53 crc kubenswrapper[4730]: I0930 10:09:53.687018 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m2t2w\" (UniqueName: 
\"kubernetes.io/projected/8dd0a8ec-fce4-4b9c-8a10-e4ebf910c583-kube-api-access-m2t2w\") pod \"nova-scheduler-0\" (UID: \"8dd0a8ec-fce4-4b9c-8a10-e4ebf910c583\") " pod="openstack/nova-scheduler-0" Sep 30 10:09:53 crc kubenswrapper[4730]: I0930 10:09:53.820689 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Sep 30 10:09:53 crc kubenswrapper[4730]: I0930 10:09:53.945496 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Sep 30 10:09:53 crc kubenswrapper[4730]: I0930 10:09:53.976349 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f54f86f6-ab4f-4a8a-85e2-14af8e9cfe50-combined-ca-bundle\") pod \"f54f86f6-ab4f-4a8a-85e2-14af8e9cfe50\" (UID: \"f54f86f6-ab4f-4a8a-85e2-14af8e9cfe50\") " Sep 30 10:09:53 crc kubenswrapper[4730]: I0930 10:09:53.976496 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f54f86f6-ab4f-4a8a-85e2-14af8e9cfe50-config-data\") pod \"f54f86f6-ab4f-4a8a-85e2-14af8e9cfe50\" (UID: \"f54f86f6-ab4f-4a8a-85e2-14af8e9cfe50\") " Sep 30 10:09:53 crc kubenswrapper[4730]: I0930 10:09:53.976573 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7ztll\" (UniqueName: \"kubernetes.io/projected/f54f86f6-ab4f-4a8a-85e2-14af8e9cfe50-kube-api-access-7ztll\") pod \"f54f86f6-ab4f-4a8a-85e2-14af8e9cfe50\" (UID: \"f54f86f6-ab4f-4a8a-85e2-14af8e9cfe50\") " Sep 30 10:09:53 crc kubenswrapper[4730]: I0930 10:09:53.976736 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f54f86f6-ab4f-4a8a-85e2-14af8e9cfe50-logs\") pod \"f54f86f6-ab4f-4a8a-85e2-14af8e9cfe50\" (UID: \"f54f86f6-ab4f-4a8a-85e2-14af8e9cfe50\") " Sep 30 10:09:53 crc kubenswrapper[4730]: I0930 10:09:53.977892 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f54f86f6-ab4f-4a8a-85e2-14af8e9cfe50-logs" (OuterVolumeSpecName: "logs") pod "f54f86f6-ab4f-4a8a-85e2-14af8e9cfe50" (UID: "f54f86f6-ab4f-4a8a-85e2-14af8e9cfe50"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 10:09:53 crc kubenswrapper[4730]: I0930 10:09:53.985387 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f54f86f6-ab4f-4a8a-85e2-14af8e9cfe50-kube-api-access-7ztll" (OuterVolumeSpecName: "kube-api-access-7ztll") pod "f54f86f6-ab4f-4a8a-85e2-14af8e9cfe50" (UID: "f54f86f6-ab4f-4a8a-85e2-14af8e9cfe50"). InnerVolumeSpecName "kube-api-access-7ztll". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:09:54 crc kubenswrapper[4730]: I0930 10:09:54.012815 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f54f86f6-ab4f-4a8a-85e2-14af8e9cfe50-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f54f86f6-ab4f-4a8a-85e2-14af8e9cfe50" (UID: "f54f86f6-ab4f-4a8a-85e2-14af8e9cfe50"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:09:54 crc kubenswrapper[4730]: I0930 10:09:54.044769 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f54f86f6-ab4f-4a8a-85e2-14af8e9cfe50-config-data" (OuterVolumeSpecName: "config-data") pod "f54f86f6-ab4f-4a8a-85e2-14af8e9cfe50" (UID: "f54f86f6-ab4f-4a8a-85e2-14af8e9cfe50"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:09:54 crc kubenswrapper[4730]: I0930 10:09:54.079924 4730 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f54f86f6-ab4f-4a8a-85e2-14af8e9cfe50-logs\") on node \"crc\" DevicePath \"\"" Sep 30 10:09:54 crc kubenswrapper[4730]: I0930 10:09:54.079981 4730 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f54f86f6-ab4f-4a8a-85e2-14af8e9cfe50-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 10:09:54 crc kubenswrapper[4730]: I0930 10:09:54.079994 4730 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f54f86f6-ab4f-4a8a-85e2-14af8e9cfe50-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 10:09:54 crc kubenswrapper[4730]: I0930 10:09:54.080007 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7ztll\" (UniqueName: \"kubernetes.io/projected/f54f86f6-ab4f-4a8a-85e2-14af8e9cfe50-kube-api-access-7ztll\") on node \"crc\" DevicePath \"\"" Sep 30 10:09:54 crc kubenswrapper[4730]: I0930 10:09:54.288478 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 10:09:54 crc kubenswrapper[4730]: I0930 10:09:54.390888 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="56117495-4103-4c6b-a1ea-079e61844032" path="/var/lib/kubelet/pods/56117495-4103-4c6b-a1ea-079e61844032/volumes" Sep 30 10:09:54 crc kubenswrapper[4730]: I0930 10:09:54.391700 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f13a23ba-f2b3-4b5b-ae08-adcb32ed8c14" path="/var/lib/kubelet/pods/f13a23ba-f2b3-4b5b-ae08-adcb32ed8c14/volumes" Sep 30 10:09:54 crc kubenswrapper[4730]: I0930 10:09:54.407111 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"8dd0a8ec-fce4-4b9c-8a10-e4ebf910c583","Type":"ContainerStarted","Data":"98e292868f7f80083d8a8b7db102974bec1f035e831bb290d64ffb57cf21817c"} Sep 30 10:09:54 crc kubenswrapper[4730]: I0930 10:09:54.409732 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"21add49c-8259-4090-88ac-34b1b97149b7","Type":"ContainerStarted","Data":"2796086665ed4226afa3534b09a17a44456280bb0d18f70a0bb4e0b5f40932d6"} Sep 30 10:09:54 crc kubenswrapper[4730]: I0930 10:09:54.409754 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"21add49c-8259-4090-88ac-34b1b97149b7","Type":"ContainerStarted","Data":"70dbd6346cef1d39e7fce864e5c7922fdc918b6b1d2c80b577098fd7931895a8"} Sep 30 10:09:54 crc kubenswrapper[4730]: I0930 10:09:54.409763 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"21add49c-8259-4090-88ac-34b1b97149b7","Type":"ContainerStarted","Data":"f306a8418dfd312287c795dcf4ad35e73e8543666c11df799b3a2ba2c1da9895"} Sep 30 10:09:54 crc kubenswrapper[4730]: I0930 10:09:54.411674 4730 generic.go:334] "Generic (PLEG): container finished" 
podID="f54f86f6-ab4f-4a8a-85e2-14af8e9cfe50" containerID="9ad55bf011f4463153847bc72efd8540e09edbaa16aaee0b46f9f39aa520ec49" exitCode=0 Sep 30 10:09:54 crc kubenswrapper[4730]: I0930 10:09:54.411722 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"f54f86f6-ab4f-4a8a-85e2-14af8e9cfe50","Type":"ContainerDied","Data":"9ad55bf011f4463153847bc72efd8540e09edbaa16aaee0b46f9f39aa520ec49"} Sep 30 10:09:54 crc kubenswrapper[4730]: I0930 10:09:54.411739 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"f54f86f6-ab4f-4a8a-85e2-14af8e9cfe50","Type":"ContainerDied","Data":"fae09b1246b52fa4dab771aec4ce712663d47094cd4912125bd23eae89bd6515"} Sep 30 10:09:54 crc kubenswrapper[4730]: I0930 10:09:54.411824 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Sep 30 10:09:54 crc kubenswrapper[4730]: I0930 10:09:54.412025 4730 scope.go:117] "RemoveContainer" containerID="9ad55bf011f4463153847bc72efd8540e09edbaa16aaee0b46f9f39aa520ec49" Sep 30 10:09:54 crc kubenswrapper[4730]: I0930 10:09:54.433868 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.433851721 podStartE2EDuration="2.433851721s" podCreationTimestamp="2025-09-30 10:09:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 10:09:54.432199357 +0000 UTC m=+1238.765459450" watchObservedRunningTime="2025-09-30 10:09:54.433851721 +0000 UTC m=+1238.767111714" Sep 30 10:09:54 crc kubenswrapper[4730]: I0930 10:09:54.445551 4730 scope.go:117] "RemoveContainer" containerID="cb029ee5d3ee981d10f0e1e5c74853b27578be3c194f15aa0f3e22e4e4efb1f2" Sep 30 10:09:54 crc kubenswrapper[4730]: I0930 10:09:54.460457 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Sep 30 10:09:54 crc kubenswrapper[4730]: I0930 10:09:54.474293 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Sep 30 10:09:54 crc kubenswrapper[4730]: I0930 10:09:54.476720 4730 scope.go:117] "RemoveContainer" containerID="9ad55bf011f4463153847bc72efd8540e09edbaa16aaee0b46f9f39aa520ec49" Sep 30 10:09:54 crc kubenswrapper[4730]: E0930 10:09:54.477246 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9ad55bf011f4463153847bc72efd8540e09edbaa16aaee0b46f9f39aa520ec49\": container with ID starting with 9ad55bf011f4463153847bc72efd8540e09edbaa16aaee0b46f9f39aa520ec49 not found: ID does not exist" containerID="9ad55bf011f4463153847bc72efd8540e09edbaa16aaee0b46f9f39aa520ec49" Sep 30 10:09:54 crc kubenswrapper[4730]: I0930 10:09:54.477341 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9ad55bf011f4463153847bc72efd8540e09edbaa16aaee0b46f9f39aa520ec49"} err="failed to get container status \"9ad55bf011f4463153847bc72efd8540e09edbaa16aaee0b46f9f39aa520ec49\": rpc error: code = NotFound desc = could not find container \"9ad55bf011f4463153847bc72efd8540e09edbaa16aaee0b46f9f39aa520ec49\": container with ID starting with 9ad55bf011f4463153847bc72efd8540e09edbaa16aaee0b46f9f39aa520ec49 not found: ID does not exist" Sep 30 10:09:54 crc kubenswrapper[4730]: I0930 10:09:54.477423 4730 scope.go:117] "RemoveContainer" containerID="cb029ee5d3ee981d10f0e1e5c74853b27578be3c194f15aa0f3e22e4e4efb1f2" Sep 30 10:09:54 crc 
kubenswrapper[4730]: E0930 10:09:54.477725 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cb029ee5d3ee981d10f0e1e5c74853b27578be3c194f15aa0f3e22e4e4efb1f2\": container with ID starting with cb029ee5d3ee981d10f0e1e5c74853b27578be3c194f15aa0f3e22e4e4efb1f2 not found: ID does not exist" containerID="cb029ee5d3ee981d10f0e1e5c74853b27578be3c194f15aa0f3e22e4e4efb1f2" Sep 30 10:09:54 crc kubenswrapper[4730]: I0930 10:09:54.477800 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cb029ee5d3ee981d10f0e1e5c74853b27578be3c194f15aa0f3e22e4e4efb1f2"} err="failed to get container status \"cb029ee5d3ee981d10f0e1e5c74853b27578be3c194f15aa0f3e22e4e4efb1f2\": rpc error: code = NotFound desc = could not find container \"cb029ee5d3ee981d10f0e1e5c74853b27578be3c194f15aa0f3e22e4e4efb1f2\": container with ID starting with cb029ee5d3ee981d10f0e1e5c74853b27578be3c194f15aa0f3e22e4e4efb1f2 not found: ID does not exist" Sep 30 10:09:54 crc kubenswrapper[4730]: I0930 10:09:54.482570 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Sep 30 10:09:54 crc kubenswrapper[4730]: E0930 10:09:54.483003 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f54f86f6-ab4f-4a8a-85e2-14af8e9cfe50" containerName="nova-api-log" Sep 30 10:09:54 crc kubenswrapper[4730]: I0930 10:09:54.483017 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="f54f86f6-ab4f-4a8a-85e2-14af8e9cfe50" containerName="nova-api-log" Sep 30 10:09:54 crc kubenswrapper[4730]: E0930 10:09:54.483063 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f54f86f6-ab4f-4a8a-85e2-14af8e9cfe50" containerName="nova-api-api" Sep 30 10:09:54 crc kubenswrapper[4730]: I0930 10:09:54.483069 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="f54f86f6-ab4f-4a8a-85e2-14af8e9cfe50" containerName="nova-api-api" Sep 30 10:09:54 crc kubenswrapper[4730]: I0930 10:09:54.483251 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="f54f86f6-ab4f-4a8a-85e2-14af8e9cfe50" containerName="nova-api-api" Sep 30 10:09:54 crc kubenswrapper[4730]: I0930 10:09:54.483267 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="f54f86f6-ab4f-4a8a-85e2-14af8e9cfe50" containerName="nova-api-log" Sep 30 10:09:54 crc kubenswrapper[4730]: I0930 10:09:54.484383 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Sep 30 10:09:54 crc kubenswrapper[4730]: I0930 10:09:54.490051 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Sep 30 10:09:54 crc kubenswrapper[4730]: I0930 10:09:54.507195 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Sep 30 10:09:54 crc kubenswrapper[4730]: I0930 10:09:54.690181 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2rtgn\" (UniqueName: \"kubernetes.io/projected/3d1e9147-373e-4cfc-a181-eaf5e9d75e94-kube-api-access-2rtgn\") pod \"nova-api-0\" (UID: \"3d1e9147-373e-4cfc-a181-eaf5e9d75e94\") " pod="openstack/nova-api-0" Sep 30 10:09:54 crc kubenswrapper[4730]: I0930 10:09:54.690390 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3d1e9147-373e-4cfc-a181-eaf5e9d75e94-config-data\") pod \"nova-api-0\" (UID: \"3d1e9147-373e-4cfc-a181-eaf5e9d75e94\") " pod="openstack/nova-api-0" Sep 30 10:09:54 crc kubenswrapper[4730]: I0930 10:09:54.690547 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3d1e9147-373e-4cfc-a181-eaf5e9d75e94-logs\") pod \"nova-api-0\" (UID: \"3d1e9147-373e-4cfc-a181-eaf5e9d75e94\") " pod="openstack/nova-api-0" Sep 30 10:09:54 crc kubenswrapper[4730]: I0930 10:09:54.690855 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d1e9147-373e-4cfc-a181-eaf5e9d75e94-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"3d1e9147-373e-4cfc-a181-eaf5e9d75e94\") " pod="openstack/nova-api-0" Sep 30 10:09:54 crc kubenswrapper[4730]: I0930 10:09:54.792417 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d1e9147-373e-4cfc-a181-eaf5e9d75e94-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"3d1e9147-373e-4cfc-a181-eaf5e9d75e94\") " pod="openstack/nova-api-0" Sep 30 10:09:54 crc kubenswrapper[4730]: I0930 10:09:54.792488 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2rtgn\" (UniqueName: \"kubernetes.io/projected/3d1e9147-373e-4cfc-a181-eaf5e9d75e94-kube-api-access-2rtgn\") pod \"nova-api-0\" (UID: \"3d1e9147-373e-4cfc-a181-eaf5e9d75e94\") " pod="openstack/nova-api-0" Sep 30 10:09:54 crc kubenswrapper[4730]: I0930 10:09:54.792563 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3d1e9147-373e-4cfc-a181-eaf5e9d75e94-config-data\") pod \"nova-api-0\" (UID: \"3d1e9147-373e-4cfc-a181-eaf5e9d75e94\") " pod="openstack/nova-api-0" Sep 30 10:09:54 crc kubenswrapper[4730]: I0930 10:09:54.792594 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3d1e9147-373e-4cfc-a181-eaf5e9d75e94-logs\") pod \"nova-api-0\" (UID: \"3d1e9147-373e-4cfc-a181-eaf5e9d75e94\") " pod="openstack/nova-api-0" Sep 30 10:09:54 crc kubenswrapper[4730]: I0930 10:09:54.792973 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3d1e9147-373e-4cfc-a181-eaf5e9d75e94-logs\") pod \"nova-api-0\" (UID: \"3d1e9147-373e-4cfc-a181-eaf5e9d75e94\") " 
pod="openstack/nova-api-0" Sep 30 10:09:54 crc kubenswrapper[4730]: I0930 10:09:54.796849 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d1e9147-373e-4cfc-a181-eaf5e9d75e94-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"3d1e9147-373e-4cfc-a181-eaf5e9d75e94\") " pod="openstack/nova-api-0" Sep 30 10:09:54 crc kubenswrapper[4730]: I0930 10:09:54.796865 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3d1e9147-373e-4cfc-a181-eaf5e9d75e94-config-data\") pod \"nova-api-0\" (UID: \"3d1e9147-373e-4cfc-a181-eaf5e9d75e94\") " pod="openstack/nova-api-0" Sep 30 10:09:54 crc kubenswrapper[4730]: I0930 10:09:54.809140 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2rtgn\" (UniqueName: \"kubernetes.io/projected/3d1e9147-373e-4cfc-a181-eaf5e9d75e94-kube-api-access-2rtgn\") pod \"nova-api-0\" (UID: \"3d1e9147-373e-4cfc-a181-eaf5e9d75e94\") " pod="openstack/nova-api-0" Sep 30 10:09:54 crc kubenswrapper[4730]: I0930 10:09:54.814583 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Sep 30 10:09:55 crc kubenswrapper[4730]: I0930 10:09:55.266068 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Sep 30 10:09:55 crc kubenswrapper[4730]: I0930 10:09:55.441946 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"8dd0a8ec-fce4-4b9c-8a10-e4ebf910c583","Type":"ContainerStarted","Data":"3c7ff92795c93c78aa6f06e32fd6fd03afc8c243d51f4c28f80a20fe4f199b6e"} Sep 30 10:09:55 crc kubenswrapper[4730]: I0930 10:09:55.447025 4730 generic.go:334] "Generic (PLEG): container finished" podID="043c0fe0-8c26-466a-bdde-0c6b9917e73f" containerID="6bfbce02fb7811a3d6c7e52798c9abf02fea72ab6852cd388f91a6b2132ace1d" exitCode=0 Sep 30 10:09:55 crc kubenswrapper[4730]: I0930 10:09:55.447074 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-hjjbx" event={"ID":"043c0fe0-8c26-466a-bdde-0c6b9917e73f","Type":"ContainerDied","Data":"6bfbce02fb7811a3d6c7e52798c9abf02fea72ab6852cd388f91a6b2132ace1d"} Sep 30 10:09:55 crc kubenswrapper[4730]: I0930 10:09:55.448497 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3d1e9147-373e-4cfc-a181-eaf5e9d75e94","Type":"ContainerStarted","Data":"42e91d1ba41417e6fa95e8666671706ae42dd425368c5a6682b7148a91e282a7"} Sep 30 10:09:55 crc kubenswrapper[4730]: I0930 10:09:55.462007 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.461985211 podStartE2EDuration="2.461985211s" podCreationTimestamp="2025-09-30 10:09:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 10:09:55.455315526 +0000 UTC m=+1239.788575529" watchObservedRunningTime="2025-09-30 10:09:55.461985211 +0000 UTC m=+1239.795245204" Sep 30 10:09:56 crc kubenswrapper[4730]: I0930 10:09:56.395025 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f54f86f6-ab4f-4a8a-85e2-14af8e9cfe50" path="/var/lib/kubelet/pods/f54f86f6-ab4f-4a8a-85e2-14af8e9cfe50/volumes" Sep 30 10:09:56 crc kubenswrapper[4730]: I0930 10:09:56.460752 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" 
event={"ID":"3d1e9147-373e-4cfc-a181-eaf5e9d75e94","Type":"ContainerStarted","Data":"325380b4bb4a7bd6db8cdbee90130e7ab7ede78dd63a743ddb69e99e5949656b"} Sep 30 10:09:56 crc kubenswrapper[4730]: I0930 10:09:56.460791 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3d1e9147-373e-4cfc-a181-eaf5e9d75e94","Type":"ContainerStarted","Data":"650a5547d9dd1cacb15881b37bae0f325aaa0954de7ada7117747331e9949af8"} Sep 30 10:09:56 crc kubenswrapper[4730]: I0930 10:09:56.481770 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.481750752 podStartE2EDuration="2.481750752s" podCreationTimestamp="2025-09-30 10:09:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 10:09:56.474922193 +0000 UTC m=+1240.808182186" watchObservedRunningTime="2025-09-30 10:09:56.481750752 +0000 UTC m=+1240.815010745" Sep 30 10:09:56 crc kubenswrapper[4730]: I0930 10:09:56.816712 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-hjjbx" Sep 30 10:09:56 crc kubenswrapper[4730]: I0930 10:09:56.932689 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/043c0fe0-8c26-466a-bdde-0c6b9917e73f-scripts\") pod \"043c0fe0-8c26-466a-bdde-0c6b9917e73f\" (UID: \"043c0fe0-8c26-466a-bdde-0c6b9917e73f\") " Sep 30 10:09:56 crc kubenswrapper[4730]: I0930 10:09:56.933846 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/043c0fe0-8c26-466a-bdde-0c6b9917e73f-config-data\") pod \"043c0fe0-8c26-466a-bdde-0c6b9917e73f\" (UID: \"043c0fe0-8c26-466a-bdde-0c6b9917e73f\") " Sep 30 10:09:56 crc kubenswrapper[4730]: I0930 10:09:56.933954 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5cxhk\" (UniqueName: \"kubernetes.io/projected/043c0fe0-8c26-466a-bdde-0c6b9917e73f-kube-api-access-5cxhk\") pod \"043c0fe0-8c26-466a-bdde-0c6b9917e73f\" (UID: \"043c0fe0-8c26-466a-bdde-0c6b9917e73f\") " Sep 30 10:09:56 crc kubenswrapper[4730]: I0930 10:09:56.934017 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/043c0fe0-8c26-466a-bdde-0c6b9917e73f-combined-ca-bundle\") pod \"043c0fe0-8c26-466a-bdde-0c6b9917e73f\" (UID: \"043c0fe0-8c26-466a-bdde-0c6b9917e73f\") " Sep 30 10:09:56 crc kubenswrapper[4730]: I0930 10:09:56.939024 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/043c0fe0-8c26-466a-bdde-0c6b9917e73f-scripts" (OuterVolumeSpecName: "scripts") pod "043c0fe0-8c26-466a-bdde-0c6b9917e73f" (UID: "043c0fe0-8c26-466a-bdde-0c6b9917e73f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:09:56 crc kubenswrapper[4730]: I0930 10:09:56.947287 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/043c0fe0-8c26-466a-bdde-0c6b9917e73f-kube-api-access-5cxhk" (OuterVolumeSpecName: "kube-api-access-5cxhk") pod "043c0fe0-8c26-466a-bdde-0c6b9917e73f" (UID: "043c0fe0-8c26-466a-bdde-0c6b9917e73f"). InnerVolumeSpecName "kube-api-access-5cxhk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:09:56 crc kubenswrapper[4730]: I0930 10:09:56.964112 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/043c0fe0-8c26-466a-bdde-0c6b9917e73f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "043c0fe0-8c26-466a-bdde-0c6b9917e73f" (UID: "043c0fe0-8c26-466a-bdde-0c6b9917e73f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:09:56 crc kubenswrapper[4730]: I0930 10:09:56.965294 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/043c0fe0-8c26-466a-bdde-0c6b9917e73f-config-data" (OuterVolumeSpecName: "config-data") pod "043c0fe0-8c26-466a-bdde-0c6b9917e73f" (UID: "043c0fe0-8c26-466a-bdde-0c6b9917e73f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:09:57 crc kubenswrapper[4730]: I0930 10:09:57.036588 4730 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/043c0fe0-8c26-466a-bdde-0c6b9917e73f-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 10:09:57 crc kubenswrapper[4730]: I0930 10:09:57.036648 4730 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/043c0fe0-8c26-466a-bdde-0c6b9917e73f-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 10:09:57 crc kubenswrapper[4730]: I0930 10:09:57.036663 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5cxhk\" (UniqueName: \"kubernetes.io/projected/043c0fe0-8c26-466a-bdde-0c6b9917e73f-kube-api-access-5cxhk\") on node \"crc\" DevicePath \"\"" Sep 30 10:09:57 crc kubenswrapper[4730]: I0930 10:09:57.036674 4730 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/043c0fe0-8c26-466a-bdde-0c6b9917e73f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 10:09:57 crc kubenswrapper[4730]: I0930 10:09:57.473477 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-hjjbx" Sep 30 10:09:57 crc kubenswrapper[4730]: I0930 10:09:57.473478 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-hjjbx" event={"ID":"043c0fe0-8c26-466a-bdde-0c6b9917e73f","Type":"ContainerDied","Data":"19f1e9633a53aefd3c05d094a219ecb00dc65589e62ff061c51265db1a6692b6"} Sep 30 10:09:57 crc kubenswrapper[4730]: I0930 10:09:57.473549 4730 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="19f1e9633a53aefd3c05d094a219ecb00dc65589e62ff061c51265db1a6692b6" Sep 30 10:09:57 crc kubenswrapper[4730]: I0930 10:09:57.540594 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Sep 30 10:09:57 crc kubenswrapper[4730]: E0930 10:09:57.540993 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="043c0fe0-8c26-466a-bdde-0c6b9917e73f" containerName="nova-cell1-conductor-db-sync" Sep 30 10:09:57 crc kubenswrapper[4730]: I0930 10:09:57.541010 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="043c0fe0-8c26-466a-bdde-0c6b9917e73f" containerName="nova-cell1-conductor-db-sync" Sep 30 10:09:57 crc kubenswrapper[4730]: I0930 10:09:57.541218 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="043c0fe0-8c26-466a-bdde-0c6b9917e73f" containerName="nova-cell1-conductor-db-sync" Sep 30 10:09:57 crc kubenswrapper[4730]: I0930 10:09:57.542006 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Sep 30 10:09:57 crc kubenswrapper[4730]: I0930 10:09:57.549968 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Sep 30 10:09:57 crc kubenswrapper[4730]: I0930 10:09:57.550856 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Sep 30 10:09:57 crc kubenswrapper[4730]: I0930 10:09:57.648655 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4521e8b6-3634-4d50-9050-0fccded8d973-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"4521e8b6-3634-4d50-9050-0fccded8d973\") " pod="openstack/nova-cell1-conductor-0" Sep 30 10:09:57 crc kubenswrapper[4730]: I0930 10:09:57.648741 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4521e8b6-3634-4d50-9050-0fccded8d973-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"4521e8b6-3634-4d50-9050-0fccded8d973\") " pod="openstack/nova-cell1-conductor-0" Sep 30 10:09:57 crc kubenswrapper[4730]: I0930 10:09:57.648930 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2d84w\" (UniqueName: \"kubernetes.io/projected/4521e8b6-3634-4d50-9050-0fccded8d973-kube-api-access-2d84w\") pod \"nova-cell1-conductor-0\" (UID: \"4521e8b6-3634-4d50-9050-0fccded8d973\") " pod="openstack/nova-cell1-conductor-0" Sep 30 10:09:57 crc kubenswrapper[4730]: I0930 10:09:57.751044 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4521e8b6-3634-4d50-9050-0fccded8d973-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"4521e8b6-3634-4d50-9050-0fccded8d973\") " pod="openstack/nova-cell1-conductor-0" Sep 30 10:09:57 crc kubenswrapper[4730]: I0930 10:09:57.751091 4730 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4521e8b6-3634-4d50-9050-0fccded8d973-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"4521e8b6-3634-4d50-9050-0fccded8d973\") " pod="openstack/nova-cell1-conductor-0"
Sep 30 10:09:57 crc kubenswrapper[4730]: I0930 10:09:57.751133 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2d84w\" (UniqueName: \"kubernetes.io/projected/4521e8b6-3634-4d50-9050-0fccded8d973-kube-api-access-2d84w\") pod \"nova-cell1-conductor-0\" (UID: \"4521e8b6-3634-4d50-9050-0fccded8d973\") " pod="openstack/nova-cell1-conductor-0"
Sep 30 10:09:57 crc kubenswrapper[4730]: I0930 10:09:57.759823 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4521e8b6-3634-4d50-9050-0fccded8d973-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"4521e8b6-3634-4d50-9050-0fccded8d973\") " pod="openstack/nova-cell1-conductor-0"
Sep 30 10:09:57 crc kubenswrapper[4730]: I0930 10:09:57.760050 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4521e8b6-3634-4d50-9050-0fccded8d973-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"4521e8b6-3634-4d50-9050-0fccded8d973\") " pod="openstack/nova-cell1-conductor-0"
Sep 30 10:09:57 crc kubenswrapper[4730]: I0930 10:09:57.772462 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2d84w\" (UniqueName: \"kubernetes.io/projected/4521e8b6-3634-4d50-9050-0fccded8d973-kube-api-access-2d84w\") pod \"nova-cell1-conductor-0\" (UID: \"4521e8b6-3634-4d50-9050-0fccded8d973\") " pod="openstack/nova-cell1-conductor-0"
Sep 30 10:09:57 crc kubenswrapper[4730]: I0930 10:09:57.839178 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
Sep 30 10:09:57 crc kubenswrapper[4730]: I0930 10:09:57.839225 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
Sep 30 10:09:57 crc kubenswrapper[4730]: I0930 10:09:57.866275 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0"
Sep 30 10:09:58 crc kubenswrapper[4730]: I0930 10:09:58.303346 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"]
Sep 30 10:09:58 crc kubenswrapper[4730]: W0930 10:09:58.308722 4730 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4521e8b6_3634_4d50_9050_0fccded8d973.slice/crio-6ba650061bb980875a2c7ef05755cdde53117009937ebc1737169adca6ba1566 WatchSource:0}: Error finding container 6ba650061bb980875a2c7ef05755cdde53117009937ebc1737169adca6ba1566: Status 404 returned error can't find the container with id 6ba650061bb980875a2c7ef05755cdde53117009937ebc1737169adca6ba1566
Sep 30 10:09:58 crc kubenswrapper[4730]: I0930 10:09:58.483573 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"4521e8b6-3634-4d50-9050-0fccded8d973","Type":"ContainerStarted","Data":"1da429f33d4a0f9f1f720a7c8e40e2480e29465ed8265381f6924e91ebb38cea"}
Sep 30 10:09:58 crc kubenswrapper[4730]: I0930 10:09:58.484021 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0"
Sep 30 10:09:58 crc kubenswrapper[4730]: I0930 10:09:58.484037 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"4521e8b6-3634-4d50-9050-0fccded8d973","Type":"ContainerStarted","Data":"6ba650061bb980875a2c7ef05755cdde53117009937ebc1737169adca6ba1566"}
Sep 30 10:09:58 crc kubenswrapper[4730]: I0930 10:09:58.508172 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=1.508150602 podStartE2EDuration="1.508150602s" podCreationTimestamp="2025-09-30 10:09:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 10:09:58.500102002 +0000 UTC m=+1242.833362025" watchObservedRunningTime="2025-09-30 10:09:58.508150602 +0000 UTC m=+1242.841410595"
Sep 30 10:09:58 crc kubenswrapper[4730]: I0930 10:09:58.820893 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0"
Sep 30 10:10:02 crc kubenswrapper[4730]: I0930 10:10:02.839095 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0"
Sep 30 10:10:02 crc kubenswrapper[4730]: I0930 10:10:02.839543 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0"
Sep 30 10:10:03 crc kubenswrapper[4730]: I0930 10:10:03.821088 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0"
Sep 30 10:10:03 crc kubenswrapper[4730]: I0930 10:10:03.850483 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0"
Sep 30 10:10:03 crc kubenswrapper[4730]: I0930 10:10:03.853788 4730 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="21add49c-8259-4090-88ac-34b1b97149b7" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.195:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)"
Sep 30 10:10:03 crc kubenswrapper[4730]: I0930 10:10:03.853876 4730 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="21add49c-8259-4090-88ac-34b1b97149b7" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.195:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)"
Sep 30 10:10:04 crc kubenswrapper[4730]: I0930 10:10:04.568333 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0"
Sep 30 10:10:04 crc kubenswrapper[4730]: I0930 10:10:04.815917 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Sep 30 10:10:04 crc kubenswrapper[4730]: I0930 10:10:04.815965 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Sep 30 10:10:05 crc kubenswrapper[4730]: I0930 10:10:05.897811 4730 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="3d1e9147-373e-4cfc-a181-eaf5e9d75e94" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.197:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Sep 30 10:10:05 crc kubenswrapper[4730]: I0930 10:10:05.897833 4730 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="3d1e9147-373e-4cfc-a181-eaf5e9d75e94" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.197:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Sep 30 10:10:07 crc kubenswrapper[4730]: I0930 10:10:07.891147 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0"
Sep 30 10:10:12 crc kubenswrapper[4730]: I0930 10:10:12.850294 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0"
Sep 30 10:10:12 crc kubenswrapper[4730]: I0930 10:10:12.851916 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0"
Sep 30 10:10:12 crc kubenswrapper[4730]: I0930 10:10:12.855409 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0"
Sep 30 10:10:13 crc kubenswrapper[4730]: I0930 10:10:13.635642 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0"
Sep 30 10:10:14 crc kubenswrapper[4730]: I0930 10:10:14.823198 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0"
Sep 30 10:10:14 crc kubenswrapper[4730]: I0930 10:10:14.823845 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0"
Sep 30 10:10:14 crc kubenswrapper[4730]: I0930 10:10:14.829846 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0"
Sep 30 10:10:14 crc kubenswrapper[4730]: I0930 10:10:14.839695 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0"
Sep 30 10:10:15 crc kubenswrapper[4730]: I0930 10:10:15.611180 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0"
Sep 30 10:10:15 crc kubenswrapper[4730]: I0930 10:10:15.661428 4730 generic.go:334] "Generic (PLEG): container finished" podID="a5263576-d080-4153-b198-1eb7caaebf06" containerID="7d4edf7d3019983206e72a280d3e7d7fe0fa083783eb193ec4f96f7be87bc249" exitCode=137
Sep 30 10:10:15 crc kubenswrapper[4730]: I0930 10:10:15.661463 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0"
Sep 30 10:10:15 crc kubenswrapper[4730]: I0930 10:10:15.661484 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"a5263576-d080-4153-b198-1eb7caaebf06","Type":"ContainerDied","Data":"7d4edf7d3019983206e72a280d3e7d7fe0fa083783eb193ec4f96f7be87bc249"}
Sep 30 10:10:15 crc kubenswrapper[4730]: I0930 10:10:15.662965 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"a5263576-d080-4153-b198-1eb7caaebf06","Type":"ContainerDied","Data":"af0e0e980b8abea3dd86624f791aa32dbc3a697a6b1bffba8921cc3520301979"}
Sep 30 10:10:15 crc kubenswrapper[4730]: I0930 10:10:15.662988 4730 scope.go:117] "RemoveContainer" containerID="7d4edf7d3019983206e72a280d3e7d7fe0fa083783eb193ec4f96f7be87bc249"
Sep 30 10:10:15 crc kubenswrapper[4730]: I0930 10:10:15.663289 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0"
Sep 30 10:10:15 crc kubenswrapper[4730]: I0930 10:10:15.670039 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0"
Sep 30 10:10:15 crc kubenswrapper[4730]: I0930 10:10:15.688059 4730 scope.go:117] "RemoveContainer" containerID="7d4edf7d3019983206e72a280d3e7d7fe0fa083783eb193ec4f96f7be87bc249"
Sep 30 10:10:15 crc kubenswrapper[4730]: E0930 10:10:15.688555 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7d4edf7d3019983206e72a280d3e7d7fe0fa083783eb193ec4f96f7be87bc249\": container with ID starting with 7d4edf7d3019983206e72a280d3e7d7fe0fa083783eb193ec4f96f7be87bc249 not found: ID does not exist" containerID="7d4edf7d3019983206e72a280d3e7d7fe0fa083783eb193ec4f96f7be87bc249"
Sep 30 10:10:15 crc kubenswrapper[4730]: I0930 10:10:15.688682 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7d4edf7d3019983206e72a280d3e7d7fe0fa083783eb193ec4f96f7be87bc249"} err="failed to get container status \"7d4edf7d3019983206e72a280d3e7d7fe0fa083783eb193ec4f96f7be87bc249\": rpc error: code = NotFound desc = could not find container \"7d4edf7d3019983206e72a280d3e7d7fe0fa083783eb193ec4f96f7be87bc249\": container with ID starting with 7d4edf7d3019983206e72a280d3e7d7fe0fa083783eb193ec4f96f7be87bc249 not found: ID does not exist"
Sep 30 10:10:15 crc kubenswrapper[4730]: I0930 10:10:15.699845 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a5263576-d080-4153-b198-1eb7caaebf06-config-data\") pod \"a5263576-d080-4153-b198-1eb7caaebf06\" (UID: \"a5263576-d080-4153-b198-1eb7caaebf06\") "
Sep 30 10:10:15 crc kubenswrapper[4730]: I0930 10:10:15.699937 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a5263576-d080-4153-b198-1eb7caaebf06-combined-ca-bundle\") pod \"a5263576-d080-4153-b198-1eb7caaebf06\" (UID: \"a5263576-d080-4153-b198-1eb7caaebf06\") "
Sep 30 10:10:15 crc kubenswrapper[4730]: I0930 10:10:15.700032 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fnpvl\" (UniqueName: \"kubernetes.io/projected/a5263576-d080-4153-b198-1eb7caaebf06-kube-api-access-fnpvl\") pod \"a5263576-d080-4153-b198-1eb7caaebf06\" (UID: \"a5263576-d080-4153-b198-1eb7caaebf06\") "
Sep 30 10:10:15 crc kubenswrapper[4730]: I0930 10:10:15.704988 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a5263576-d080-4153-b198-1eb7caaebf06-kube-api-access-fnpvl" (OuterVolumeSpecName: "kube-api-access-fnpvl") pod "a5263576-d080-4153-b198-1eb7caaebf06" (UID: "a5263576-d080-4153-b198-1eb7caaebf06"). InnerVolumeSpecName "kube-api-access-fnpvl". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 10:10:15 crc kubenswrapper[4730]: I0930 10:10:15.731944 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a5263576-d080-4153-b198-1eb7caaebf06-config-data" (OuterVolumeSpecName: "config-data") pod "a5263576-d080-4153-b198-1eb7caaebf06" (UID: "a5263576-d080-4153-b198-1eb7caaebf06"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 10:10:15 crc kubenswrapper[4730]: I0930 10:10:15.752144 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a5263576-d080-4153-b198-1eb7caaebf06-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a5263576-d080-4153-b198-1eb7caaebf06" (UID: "a5263576-d080-4153-b198-1eb7caaebf06"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 10:10:15 crc kubenswrapper[4730]: I0930 10:10:15.802114 4730 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a5263576-d080-4153-b198-1eb7caaebf06-config-data\") on node \"crc\" DevicePath \"\""
Sep 30 10:10:15 crc kubenswrapper[4730]: I0930 10:10:15.802420 4730 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a5263576-d080-4153-b198-1eb7caaebf06-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 30 10:10:15 crc kubenswrapper[4730]: I0930 10:10:15.802439 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fnpvl\" (UniqueName: \"kubernetes.io/projected/a5263576-d080-4153-b198-1eb7caaebf06-kube-api-access-fnpvl\") on node \"crc\" DevicePath \"\""
Sep 30 10:10:15 crc kubenswrapper[4730]: I0930 10:10:15.861077 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5fb4859d7f-dw2rw"]
Sep 30 10:10:15 crc kubenswrapper[4730]: E0930 10:10:15.861671 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a5263576-d080-4153-b198-1eb7caaebf06" containerName="nova-cell1-novncproxy-novncproxy"
Sep 30 10:10:15 crc kubenswrapper[4730]: I0930 10:10:15.861688 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="a5263576-d080-4153-b198-1eb7caaebf06" containerName="nova-cell1-novncproxy-novncproxy"
Sep 30 10:10:15 crc kubenswrapper[4730]: I0930 10:10:15.861903 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="a5263576-d080-4153-b198-1eb7caaebf06" containerName="nova-cell1-novncproxy-novncproxy"
Sep 30 10:10:15 crc kubenswrapper[4730]: I0930 10:10:15.863548 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5fb4859d7f-dw2rw"
Sep 30 10:10:15 crc kubenswrapper[4730]: I0930 10:10:15.878638 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5fb4859d7f-dw2rw"]
Sep 30 10:10:15 crc kubenswrapper[4730]: I0930 10:10:15.997488 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Sep 30 10:10:16 crc kubenswrapper[4730]: I0930 10:10:16.005075 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7a53a8ac-4c91-46a7-a299-f63ec271774b-config\") pod \"dnsmasq-dns-5fb4859d7f-dw2rw\" (UID: \"7a53a8ac-4c91-46a7-a299-f63ec271774b\") " pod="openstack/dnsmasq-dns-5fb4859d7f-dw2rw"
Sep 30 10:10:16 crc kubenswrapper[4730]: I0930 10:10:16.005123 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7a53a8ac-4c91-46a7-a299-f63ec271774b-dns-svc\") pod \"dnsmasq-dns-5fb4859d7f-dw2rw\" (UID: \"7a53a8ac-4c91-46a7-a299-f63ec271774b\") " pod="openstack/dnsmasq-dns-5fb4859d7f-dw2rw"
Sep 30 10:10:16 crc kubenswrapper[4730]: I0930 10:10:16.005207 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7a53a8ac-4c91-46a7-a299-f63ec271774b-ovsdbserver-sb\") pod \"dnsmasq-dns-5fb4859d7f-dw2rw\" (UID: \"7a53a8ac-4c91-46a7-a299-f63ec271774b\") " pod="openstack/dnsmasq-dns-5fb4859d7f-dw2rw"
Sep 30 10:10:16 crc kubenswrapper[4730]: I0930 10:10:16.005228 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k7tfr\" (UniqueName: \"kubernetes.io/projected/7a53a8ac-4c91-46a7-a299-f63ec271774b-kube-api-access-k7tfr\") pod \"dnsmasq-dns-5fb4859d7f-dw2rw\" (UID: \"7a53a8ac-4c91-46a7-a299-f63ec271774b\") " pod="openstack/dnsmasq-dns-5fb4859d7f-dw2rw"
Sep 30 10:10:16 crc kubenswrapper[4730]: I0930 10:10:16.005262 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7a53a8ac-4c91-46a7-a299-f63ec271774b-ovsdbserver-nb\") pod \"dnsmasq-dns-5fb4859d7f-dw2rw\" (UID: \"7a53a8ac-4c91-46a7-a299-f63ec271774b\") " pod="openstack/dnsmasq-dns-5fb4859d7f-dw2rw"
Sep 30 10:10:16 crc kubenswrapper[4730]: I0930 10:10:16.012765 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Sep 30 10:10:16 crc kubenswrapper[4730]: I0930 10:10:16.020030 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Sep 30 10:10:16 crc kubenswrapper[4730]: I0930 10:10:16.021383 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0"
Sep 30 10:10:16 crc kubenswrapper[4730]: I0930 10:10:16.023634 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-vencrypt"
Sep 30 10:10:16 crc kubenswrapper[4730]: I0930 10:10:16.024654 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-public-svc"
Sep 30 10:10:16 crc kubenswrapper[4730]: I0930 10:10:16.024815 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data"
Sep 30 10:10:16 crc kubenswrapper[4730]: I0930 10:10:16.034044 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Sep 30 10:10:16 crc kubenswrapper[4730]: I0930 10:10:16.106816 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gvdtl\" (UniqueName: \"kubernetes.io/projected/ed5ae6a3-8d5a-4fd9-8de9-380e7a54ce4c-kube-api-access-gvdtl\") pod \"nova-cell1-novncproxy-0\" (UID: \"ed5ae6a3-8d5a-4fd9-8de9-380e7a54ce4c\") " pod="openstack/nova-cell1-novncproxy-0"
Sep 30 10:10:16 crc kubenswrapper[4730]: I0930 10:10:16.106867 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ed5ae6a3-8d5a-4fd9-8de9-380e7a54ce4c-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"ed5ae6a3-8d5a-4fd9-8de9-380e7a54ce4c\") " pod="openstack/nova-cell1-novncproxy-0"
Sep 30 10:10:16 crc kubenswrapper[4730]: I0930 10:10:16.106899 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/ed5ae6a3-8d5a-4fd9-8de9-380e7a54ce4c-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"ed5ae6a3-8d5a-4fd9-8de9-380e7a54ce4c\") " pod="openstack/nova-cell1-novncproxy-0"
Sep 30 10:10:16 crc kubenswrapper[4730]: I0930 10:10:16.107020 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7a53a8ac-4c91-46a7-a299-f63ec271774b-config\") pod \"dnsmasq-dns-5fb4859d7f-dw2rw\" (UID: \"7a53a8ac-4c91-46a7-a299-f63ec271774b\") " pod="openstack/dnsmasq-dns-5fb4859d7f-dw2rw"
Sep 30 10:10:16 crc kubenswrapper[4730]: I0930 10:10:16.107058 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/ed5ae6a3-8d5a-4fd9-8de9-380e7a54ce4c-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"ed5ae6a3-8d5a-4fd9-8de9-380e7a54ce4c\") " pod="openstack/nova-cell1-novncproxy-0"
Sep 30 10:10:16 crc kubenswrapper[4730]: I0930 10:10:16.107081 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7a53a8ac-4c91-46a7-a299-f63ec271774b-dns-svc\") pod \"dnsmasq-dns-5fb4859d7f-dw2rw\" (UID: \"7a53a8ac-4c91-46a7-a299-f63ec271774b\") " pod="openstack/dnsmasq-dns-5fb4859d7f-dw2rw"
Sep 30 10:10:16 crc kubenswrapper[4730]: I0930 10:10:16.107130 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7a53a8ac-4c91-46a7-a299-f63ec271774b-ovsdbserver-sb\") pod \"dnsmasq-dns-5fb4859d7f-dw2rw\" (UID: \"7a53a8ac-4c91-46a7-a299-f63ec271774b\") " pod="openstack/dnsmasq-dns-5fb4859d7f-dw2rw"
Sep 30 10:10:16 crc kubenswrapper[4730]: I0930 10:10:16.107150 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k7tfr\" (UniqueName: \"kubernetes.io/projected/7a53a8ac-4c91-46a7-a299-f63ec271774b-kube-api-access-k7tfr\") pod \"dnsmasq-dns-5fb4859d7f-dw2rw\" (UID: \"7a53a8ac-4c91-46a7-a299-f63ec271774b\") " pod="openstack/dnsmasq-dns-5fb4859d7f-dw2rw"
Sep 30 10:10:16 crc kubenswrapper[4730]: I0930 10:10:16.107184 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ed5ae6a3-8d5a-4fd9-8de9-380e7a54ce4c-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"ed5ae6a3-8d5a-4fd9-8de9-380e7a54ce4c\") " pod="openstack/nova-cell1-novncproxy-0"
Sep 30 10:10:16 crc kubenswrapper[4730]: I0930 10:10:16.107202 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7a53a8ac-4c91-46a7-a299-f63ec271774b-ovsdbserver-nb\") pod \"dnsmasq-dns-5fb4859d7f-dw2rw\" (UID: \"7a53a8ac-4c91-46a7-a299-f63ec271774b\") " pod="openstack/dnsmasq-dns-5fb4859d7f-dw2rw"
Sep 30 10:10:16 crc kubenswrapper[4730]: I0930 10:10:16.108111 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7a53a8ac-4c91-46a7-a299-f63ec271774b-ovsdbserver-nb\") pod \"dnsmasq-dns-5fb4859d7f-dw2rw\" (UID: \"7a53a8ac-4c91-46a7-a299-f63ec271774b\") " pod="openstack/dnsmasq-dns-5fb4859d7f-dw2rw"
Sep 30 10:10:16 crc kubenswrapper[4730]: I0930 10:10:16.108117 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7a53a8ac-4c91-46a7-a299-f63ec271774b-config\") pod \"dnsmasq-dns-5fb4859d7f-dw2rw\" (UID: \"7a53a8ac-4c91-46a7-a299-f63ec271774b\") " pod="openstack/dnsmasq-dns-5fb4859d7f-dw2rw"
Sep 30 10:10:16 crc kubenswrapper[4730]: I0930 10:10:16.108139 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7a53a8ac-4c91-46a7-a299-f63ec271774b-ovsdbserver-sb\") pod \"dnsmasq-dns-5fb4859d7f-dw2rw\" (UID: \"7a53a8ac-4c91-46a7-a299-f63ec271774b\") " pod="openstack/dnsmasq-dns-5fb4859d7f-dw2rw"
Sep 30 10:10:16 crc kubenswrapper[4730]: I0930 10:10:16.108848 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7a53a8ac-4c91-46a7-a299-f63ec271774b-dns-svc\") pod \"dnsmasq-dns-5fb4859d7f-dw2rw\" (UID: \"7a53a8ac-4c91-46a7-a299-f63ec271774b\") " pod="openstack/dnsmasq-dns-5fb4859d7f-dw2rw"
Sep 30 10:10:16 crc kubenswrapper[4730]: I0930 10:10:16.125584 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k7tfr\" (UniqueName: \"kubernetes.io/projected/7a53a8ac-4c91-46a7-a299-f63ec271774b-kube-api-access-k7tfr\") pod \"dnsmasq-dns-5fb4859d7f-dw2rw\" (UID: \"7a53a8ac-4c91-46a7-a299-f63ec271774b\") " pod="openstack/dnsmasq-dns-5fb4859d7f-dw2rw"
Sep 30 10:10:16 crc kubenswrapper[4730]: I0930 10:10:16.208374 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/ed5ae6a3-8d5a-4fd9-8de9-380e7a54ce4c-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"ed5ae6a3-8d5a-4fd9-8de9-380e7a54ce4c\") " pod="openstack/nova-cell1-novncproxy-0"
Sep 30 10:10:16 crc kubenswrapper[4730]: I0930 10:10:16.208525 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ed5ae6a3-8d5a-4fd9-8de9-380e7a54ce4c-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"ed5ae6a3-8d5a-4fd9-8de9-380e7a54ce4c\") " pod="openstack/nova-cell1-novncproxy-0"
Sep 30 10:10:16 crc kubenswrapper[4730]: I0930 10:10:16.208584 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gvdtl\" (UniqueName: \"kubernetes.io/projected/ed5ae6a3-8d5a-4fd9-8de9-380e7a54ce4c-kube-api-access-gvdtl\") pod \"nova-cell1-novncproxy-0\" (UID: \"ed5ae6a3-8d5a-4fd9-8de9-380e7a54ce4c\") " pod="openstack/nova-cell1-novncproxy-0"
Sep 30 10:10:16 crc kubenswrapper[4730]: I0930 10:10:16.208627 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ed5ae6a3-8d5a-4fd9-8de9-380e7a54ce4c-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"ed5ae6a3-8d5a-4fd9-8de9-380e7a54ce4c\") " pod="openstack/nova-cell1-novncproxy-0"
Sep 30 10:10:16 crc kubenswrapper[4730]: I0930 10:10:16.208664 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/ed5ae6a3-8d5a-4fd9-8de9-380e7a54ce4c-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"ed5ae6a3-8d5a-4fd9-8de9-380e7a54ce4c\") " pod="openstack/nova-cell1-novncproxy-0"
Sep 30 10:10:16 crc kubenswrapper[4730]: I0930 10:10:16.211898 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/ed5ae6a3-8d5a-4fd9-8de9-380e7a54ce4c-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"ed5ae6a3-8d5a-4fd9-8de9-380e7a54ce4c\") " pod="openstack/nova-cell1-novncproxy-0"
Sep 30 10:10:16 crc kubenswrapper[4730]: I0930 10:10:16.214245 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ed5ae6a3-8d5a-4fd9-8de9-380e7a54ce4c-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"ed5ae6a3-8d5a-4fd9-8de9-380e7a54ce4c\") " pod="openstack/nova-cell1-novncproxy-0"
Sep 30 10:10:16 crc kubenswrapper[4730]: I0930 10:10:16.214281 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/ed5ae6a3-8d5a-4fd9-8de9-380e7a54ce4c-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"ed5ae6a3-8d5a-4fd9-8de9-380e7a54ce4c\") " pod="openstack/nova-cell1-novncproxy-0"
Sep 30 10:10:16 crc kubenswrapper[4730]: I0930 10:10:16.214752 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ed5ae6a3-8d5a-4fd9-8de9-380e7a54ce4c-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"ed5ae6a3-8d5a-4fd9-8de9-380e7a54ce4c\") " pod="openstack/nova-cell1-novncproxy-0"
Sep 30 10:10:16 crc kubenswrapper[4730]: I0930 10:10:16.217775 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5fb4859d7f-dw2rw"
Sep 30 10:10:16 crc kubenswrapper[4730]: I0930 10:10:16.227760 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gvdtl\" (UniqueName: \"kubernetes.io/projected/ed5ae6a3-8d5a-4fd9-8de9-380e7a54ce4c-kube-api-access-gvdtl\") pod \"nova-cell1-novncproxy-0\" (UID: \"ed5ae6a3-8d5a-4fd9-8de9-380e7a54ce4c\") " pod="openstack/nova-cell1-novncproxy-0"
Sep 30 10:10:16 crc kubenswrapper[4730]: I0930 10:10:16.343428 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0"
Sep 30 10:10:16 crc kubenswrapper[4730]: I0930 10:10:16.419662 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a5263576-d080-4153-b198-1eb7caaebf06" path="/var/lib/kubelet/pods/a5263576-d080-4153-b198-1eb7caaebf06/volumes"
Sep 30 10:10:16 crc kubenswrapper[4730]: I0930 10:10:16.697380 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5fb4859d7f-dw2rw"]
Sep 30 10:10:16 crc kubenswrapper[4730]: W0930 10:10:16.705375 4730 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7a53a8ac_4c91_46a7_a299_f63ec271774b.slice/crio-501f871db4eb0dbfb4f3efba4ba9bd2499e52940cffba931a420cbffa8fa4b4c WatchSource:0}: Error finding container 501f871db4eb0dbfb4f3efba4ba9bd2499e52940cffba931a420cbffa8fa4b4c: Status 404 returned error can't find the container with id 501f871db4eb0dbfb4f3efba4ba9bd2499e52940cffba931a420cbffa8fa4b4c
Sep 30 10:10:16 crc kubenswrapper[4730]: I0930 10:10:16.855308 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Sep 30 10:10:16 crc kubenswrapper[4730]: W0930 10:10:16.867792 4730 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poded5ae6a3_8d5a_4fd9_8de9_380e7a54ce4c.slice/crio-8af07d50bf2516f50a81f42b3937efb99a3f526b0f255a40f52386836a75baaa WatchSource:0}: Error finding container 8af07d50bf2516f50a81f42b3937efb99a3f526b0f255a40f52386836a75baaa: Status 404 returned error can't find the container with id 8af07d50bf2516f50a81f42b3937efb99a3f526b0f255a40f52386836a75baaa
Sep 30 10:10:17 crc kubenswrapper[4730]: I0930 10:10:17.683334 4730 generic.go:334] "Generic (PLEG): container finished" podID="7a53a8ac-4c91-46a7-a299-f63ec271774b" containerID="b2db242ed5b1d797e7c38a18b0ddf9f7d7aced1909d532c43579d0cda33bc6cf" exitCode=0
Sep 30 10:10:17 crc kubenswrapper[4730]: I0930 10:10:17.683393 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5fb4859d7f-dw2rw" event={"ID":"7a53a8ac-4c91-46a7-a299-f63ec271774b","Type":"ContainerDied","Data":"b2db242ed5b1d797e7c38a18b0ddf9f7d7aced1909d532c43579d0cda33bc6cf"}
Sep 30 10:10:17 crc kubenswrapper[4730]: I0930 10:10:17.683739 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5fb4859d7f-dw2rw" event={"ID":"7a53a8ac-4c91-46a7-a299-f63ec271774b","Type":"ContainerStarted","Data":"501f871db4eb0dbfb4f3efba4ba9bd2499e52940cffba931a420cbffa8fa4b4c"}
Sep 30 10:10:17 crc kubenswrapper[4730]: I0930 10:10:17.686258 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"ed5ae6a3-8d5a-4fd9-8de9-380e7a54ce4c","Type":"ContainerStarted","Data":"16798e99b1921900e79ca6aea402bd185a60ca4c23233dca6224e06ad9476477"}
Sep 30 10:10:17 crc kubenswrapper[4730]: I0930 10:10:17.686307 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"ed5ae6a3-8d5a-4fd9-8de9-380e7a54ce4c","Type":"ContainerStarted","Data":"8af07d50bf2516f50a81f42b3937efb99a3f526b0f255a40f52386836a75baaa"}
Sep 30 10:10:17 crc kubenswrapper[4730]: I0930 10:10:17.758886 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.758864857 podStartE2EDuration="2.758864857s" podCreationTimestamp="2025-09-30 10:10:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 10:10:17.729073257 +0000 UTC m=+1262.062333250" watchObservedRunningTime="2025-09-30 10:10:17.758864857 +0000 UTC m=+1262.092124850"
Sep 30 10:10:18 crc kubenswrapper[4730]: I0930 10:10:18.543250 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Sep 30 10:10:18 crc kubenswrapper[4730]: I0930 10:10:18.549075 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="994beb56-6886-40ad-88f9-98e66828cafd" containerName="ceilometer-central-agent" containerID="cri-o://f314f1f1ee72b0424cc3d973d3a7162d6cab86d40721a9a6d85d1bf05ee6225d" gracePeriod=30
Sep 30 10:10:18 crc kubenswrapper[4730]: I0930 10:10:18.549644 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="994beb56-6886-40ad-88f9-98e66828cafd" containerName="proxy-httpd" containerID="cri-o://761137ad28d52bd9509b460a9ac82ad2ccb2003086aa456fd838e6219e521dac" gracePeriod=30
Sep 30 10:10:18 crc kubenswrapper[4730]: I0930 10:10:18.549742 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="994beb56-6886-40ad-88f9-98e66828cafd" containerName="ceilometer-notification-agent" containerID="cri-o://26c3f8acd99c58dd7c7ffa2e39768b3752aa2e2e49987dcdbe3d46a3d76d528b" gracePeriod=30
Sep 30 10:10:18 crc kubenswrapper[4730]: I0930 10:10:18.549882 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="994beb56-6886-40ad-88f9-98e66828cafd" containerName="sg-core" containerID="cri-o://7ed151c7840c898d7d2eb7e101da3a438c14fec51d25187a09a03713b50ca041" gracePeriod=30
Sep 30 10:10:18 crc kubenswrapper[4730]: I0930 10:10:18.696352 4730 generic.go:334] "Generic (PLEG): container finished" podID="994beb56-6886-40ad-88f9-98e66828cafd" containerID="7ed151c7840c898d7d2eb7e101da3a438c14fec51d25187a09a03713b50ca041" exitCode=2
Sep 30 10:10:18 crc kubenswrapper[4730]: I0930 10:10:18.696479 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"994beb56-6886-40ad-88f9-98e66828cafd","Type":"ContainerDied","Data":"7ed151c7840c898d7d2eb7e101da3a438c14fec51d25187a09a03713b50ca041"}
Sep 30 10:10:18 crc kubenswrapper[4730]: I0930 10:10:18.703005 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5fb4859d7f-dw2rw" event={"ID":"7a53a8ac-4c91-46a7-a299-f63ec271774b","Type":"ContainerStarted","Data":"e68959d125802b7c3ccbadacb9e5bc923ab54b6fb0c03c1e0ed1bacd4e2649a8"}
Sep 30 10:10:18 crc kubenswrapper[4730]: I0930 10:10:18.703076 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5fb4859d7f-dw2rw"
Sep 30 10:10:18 crc kubenswrapper[4730]: I0930 10:10:18.730861 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5fb4859d7f-dw2rw" podStartSLOduration=3.730841828 podStartE2EDuration="3.730841828s" podCreationTimestamp="2025-09-30 10:10:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 10:10:18.728335952 +0000 UTC m=+1263.061595945" watchObservedRunningTime="2025-09-30 10:10:18.730841828 +0000 UTC m=+1263.064101831"
Sep 30 10:10:18 crc kubenswrapper[4730]: I0930 10:10:18.814130 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"]
Sep 30 10:10:18 crc kubenswrapper[4730]: I0930 10:10:18.814385 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="3d1e9147-373e-4cfc-a181-eaf5e9d75e94" containerName="nova-api-log" containerID="cri-o://650a5547d9dd1cacb15881b37bae0f325aaa0954de7ada7117747331e9949af8" gracePeriod=30
Sep 30 10:10:18 crc kubenswrapper[4730]: I0930 10:10:18.814503 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="3d1e9147-373e-4cfc-a181-eaf5e9d75e94" containerName="nova-api-api" containerID="cri-o://325380b4bb4a7bd6db8cdbee90130e7ab7ede78dd63a743ddb69e99e5949656b" gracePeriod=30
Sep 30 10:10:19 crc kubenswrapper[4730]: I0930 10:10:19.713869 4730 generic.go:334] "Generic (PLEG): container finished" podID="3d1e9147-373e-4cfc-a181-eaf5e9d75e94" containerID="650a5547d9dd1cacb15881b37bae0f325aaa0954de7ada7117747331e9949af8" exitCode=143
Sep 30 10:10:19 crc kubenswrapper[4730]: I0930 10:10:19.713948 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3d1e9147-373e-4cfc-a181-eaf5e9d75e94","Type":"ContainerDied","Data":"650a5547d9dd1cacb15881b37bae0f325aaa0954de7ada7117747331e9949af8"}
Sep 30 10:10:19 crc kubenswrapper[4730]: I0930 10:10:19.717287 4730 generic.go:334] "Generic (PLEG): container finished" podID="994beb56-6886-40ad-88f9-98e66828cafd" containerID="761137ad28d52bd9509b460a9ac82ad2ccb2003086aa456fd838e6219e521dac" exitCode=0
Sep 30 10:10:19 crc kubenswrapper[4730]: I0930 10:10:19.717428 4730 generic.go:334] "Generic (PLEG): container finished" podID="994beb56-6886-40ad-88f9-98e66828cafd" containerID="f314f1f1ee72b0424cc3d973d3a7162d6cab86d40721a9a6d85d1bf05ee6225d" exitCode=0
Sep 30 10:10:19 crc kubenswrapper[4730]: I0930 10:10:19.717329 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"994beb56-6886-40ad-88f9-98e66828cafd","Type":"ContainerDied","Data":"761137ad28d52bd9509b460a9ac82ad2ccb2003086aa456fd838e6219e521dac"}
Sep 30 10:10:19 crc kubenswrapper[4730]: I0930 10:10:19.717542 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"994beb56-6886-40ad-88f9-98e66828cafd","Type":"ContainerDied","Data":"f314f1f1ee72b0424cc3d973d3a7162d6cab86d40721a9a6d85d1bf05ee6225d"}
Sep 30 10:10:20 crc kubenswrapper[4730]: I0930 10:10:20.444656 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Sep 30 10:10:20 crc kubenswrapper[4730]: I0930 10:10:20.606994 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2rtgn\" (UniqueName: \"kubernetes.io/projected/3d1e9147-373e-4cfc-a181-eaf5e9d75e94-kube-api-access-2rtgn\") pod \"3d1e9147-373e-4cfc-a181-eaf5e9d75e94\" (UID: \"3d1e9147-373e-4cfc-a181-eaf5e9d75e94\") "
Sep 30 10:10:20 crc kubenswrapper[4730]: I0930 10:10:20.607127 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d1e9147-373e-4cfc-a181-eaf5e9d75e94-combined-ca-bundle\") pod \"3d1e9147-373e-4cfc-a181-eaf5e9d75e94\" (UID: \"3d1e9147-373e-4cfc-a181-eaf5e9d75e94\") "
Sep 30 10:10:20 crc kubenswrapper[4730]: I0930 10:10:20.607198 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3d1e9147-373e-4cfc-a181-eaf5e9d75e94-logs\") pod \"3d1e9147-373e-4cfc-a181-eaf5e9d75e94\" (UID: \"3d1e9147-373e-4cfc-a181-eaf5e9d75e94\") "
Sep 30 10:10:20 crc kubenswrapper[4730]: I0930 10:10:20.607245 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3d1e9147-373e-4cfc-a181-eaf5e9d75e94-config-data\") pod \"3d1e9147-373e-4cfc-a181-eaf5e9d75e94\" (UID: \"3d1e9147-373e-4cfc-a181-eaf5e9d75e94\") "
Sep 30 10:10:20 crc kubenswrapper[4730]: I0930 10:10:20.607651 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3d1e9147-373e-4cfc-a181-eaf5e9d75e94-logs" (OuterVolumeSpecName: "logs") pod "3d1e9147-373e-4cfc-a181-eaf5e9d75e94" (UID: "3d1e9147-373e-4cfc-a181-eaf5e9d75e94"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 10:10:20 crc kubenswrapper[4730]: I0930 10:10:20.614249 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3d1e9147-373e-4cfc-a181-eaf5e9d75e94-kube-api-access-2rtgn" (OuterVolumeSpecName: "kube-api-access-2rtgn") pod "3d1e9147-373e-4cfc-a181-eaf5e9d75e94" (UID: "3d1e9147-373e-4cfc-a181-eaf5e9d75e94"). InnerVolumeSpecName "kube-api-access-2rtgn". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 10:10:20 crc kubenswrapper[4730]: I0930 10:10:20.635003 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3d1e9147-373e-4cfc-a181-eaf5e9d75e94-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3d1e9147-373e-4cfc-a181-eaf5e9d75e94" (UID: "3d1e9147-373e-4cfc-a181-eaf5e9d75e94"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 10:10:20 crc kubenswrapper[4730]: I0930 10:10:20.657015 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3d1e9147-373e-4cfc-a181-eaf5e9d75e94-config-data" (OuterVolumeSpecName: "config-data") pod "3d1e9147-373e-4cfc-a181-eaf5e9d75e94" (UID: "3d1e9147-373e-4cfc-a181-eaf5e9d75e94"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 10:10:20 crc kubenswrapper[4730]: I0930 10:10:20.709362 4730 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d1e9147-373e-4cfc-a181-eaf5e9d75e94-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 30 10:10:20 crc kubenswrapper[4730]: I0930 10:10:20.709414 4730 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3d1e9147-373e-4cfc-a181-eaf5e9d75e94-logs\") on node \"crc\" DevicePath \"\""
Sep 30 10:10:20 crc kubenswrapper[4730]: I0930 10:10:20.709430 4730 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3d1e9147-373e-4cfc-a181-eaf5e9d75e94-config-data\") on node \"crc\" DevicePath \"\""
Sep 30 10:10:20 crc kubenswrapper[4730]: I0930 10:10:20.709441 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2rtgn\" (UniqueName: \"kubernetes.io/projected/3d1e9147-373e-4cfc-a181-eaf5e9d75e94-kube-api-access-2rtgn\") on node \"crc\" DevicePath \"\""
Sep 30 10:10:20 crc kubenswrapper[4730]: I0930 10:10:20.729005 4730 generic.go:334] "Generic (PLEG): container finished" podID="3d1e9147-373e-4cfc-a181-eaf5e9d75e94" containerID="325380b4bb4a7bd6db8cdbee90130e7ab7ede78dd63a743ddb69e99e5949656b" exitCode=0
Sep 30 10:10:20 crc kubenswrapper[4730]: I0930 10:10:20.729061 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Sep 30 10:10:20 crc kubenswrapper[4730]: I0930 10:10:20.729052 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3d1e9147-373e-4cfc-a181-eaf5e9d75e94","Type":"ContainerDied","Data":"325380b4bb4a7bd6db8cdbee90130e7ab7ede78dd63a743ddb69e99e5949656b"}
Sep 30 10:10:20 crc kubenswrapper[4730]: I0930 10:10:20.729154 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3d1e9147-373e-4cfc-a181-eaf5e9d75e94","Type":"ContainerDied","Data":"42e91d1ba41417e6fa95e8666671706ae42dd425368c5a6682b7148a91e282a7"}
Sep 30 10:10:20 crc kubenswrapper[4730]: I0930 10:10:20.729174 4730 scope.go:117] "RemoveContainer" containerID="325380b4bb4a7bd6db8cdbee90130e7ab7ede78dd63a743ddb69e99e5949656b"
Sep 30 10:10:20 crc kubenswrapper[4730]: I0930 10:10:20.768933 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"]
Sep 30 10:10:20 crc kubenswrapper[4730]: I0930 10:10:20.781816 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"]
Sep 30 10:10:20 crc kubenswrapper[4730]: I0930 10:10:20.790461 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"]
Sep 30 10:10:20 crc kubenswrapper[4730]: I0930 10:10:20.790783 4730 scope.go:117] "RemoveContainer" containerID="650a5547d9dd1cacb15881b37bae0f325aaa0954de7ada7117747331e9949af8"
Sep 30 10:10:20 crc kubenswrapper[4730]: E0930 10:10:20.790903 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3d1e9147-373e-4cfc-a181-eaf5e9d75e94" containerName="nova-api-api"
Sep 30 10:10:20 crc kubenswrapper[4730]: I0930 10:10:20.790928 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="3d1e9147-373e-4cfc-a181-eaf5e9d75e94" containerName="nova-api-api"
Sep 30 10:10:20 crc kubenswrapper[4730]: E0930 10:10:20.790948 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3d1e9147-373e-4cfc-a181-eaf5e9d75e94" containerName="nova-api-log"
Sep 30 10:10:20 crc kubenswrapper[4730]: I0930 10:10:20.790957 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="3d1e9147-373e-4cfc-a181-eaf5e9d75e94" containerName="nova-api-log"
Sep 30 10:10:20 crc kubenswrapper[4730]: I0930 10:10:20.791161 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="3d1e9147-373e-4cfc-a181-eaf5e9d75e94" containerName="nova-api-api"
Sep 30 10:10:20 crc kubenswrapper[4730]: I0930 10:10:20.791199 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="3d1e9147-373e-4cfc-a181-eaf5e9d75e94" containerName="nova-api-log"
Sep 30 10:10:20 crc kubenswrapper[4730]: I0930 10:10:20.794349 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Sep 30 10:10:20 crc kubenswrapper[4730]: I0930 10:10:20.796603 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc"
Sep 30 10:10:20 crc kubenswrapper[4730]: I0930 10:10:20.796754 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc"
Sep 30 10:10:20 crc kubenswrapper[4730]: I0930 10:10:20.797125 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data"
Sep 30 10:10:20 crc kubenswrapper[4730]: I0930 10:10:20.815151 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Sep 30 10:10:20 crc kubenswrapper[4730]: I0930 10:10:20.846889 4730 scope.go:117] "RemoveContainer" containerID="325380b4bb4a7bd6db8cdbee90130e7ab7ede78dd63a743ddb69e99e5949656b"
Sep 30 10:10:20 crc kubenswrapper[4730]: E0930 10:10:20.847718 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"325380b4bb4a7bd6db8cdbee90130e7ab7ede78dd63a743ddb69e99e5949656b\": container with ID starting with 325380b4bb4a7bd6db8cdbee90130e7ab7ede78dd63a743ddb69e99e5949656b not found: ID does not exist" containerID="325380b4bb4a7bd6db8cdbee90130e7ab7ede78dd63a743ddb69e99e5949656b"
Sep 30 10:10:20 crc kubenswrapper[4730]: I0930 10:10:20.847781 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"325380b4bb4a7bd6db8cdbee90130e7ab7ede78dd63a743ddb69e99e5949656b"} err="failed to get container status \"325380b4bb4a7bd6db8cdbee90130e7ab7ede78dd63a743ddb69e99e5949656b\": rpc error: code = NotFound desc = could not find container \"325380b4bb4a7bd6db8cdbee90130e7ab7ede78dd63a743ddb69e99e5949656b\": container with ID starting with 325380b4bb4a7bd6db8cdbee90130e7ab7ede78dd63a743ddb69e99e5949656b not found: ID does not exist"
Sep 30 10:10:20 crc kubenswrapper[4730]: I0930 10:10:20.847815 4730 scope.go:117] "RemoveContainer" containerID="650a5547d9dd1cacb15881b37bae0f325aaa0954de7ada7117747331e9949af8"
Sep 30 10:10:20 crc kubenswrapper[4730]: E0930 10:10:20.848596 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"650a5547d9dd1cacb15881b37bae0f325aaa0954de7ada7117747331e9949af8\": container with ID starting with 650a5547d9dd1cacb15881b37bae0f325aaa0954de7ada7117747331e9949af8 not found: ID does not exist" containerID="650a5547d9dd1cacb15881b37bae0f325aaa0954de7ada7117747331e9949af8"
Sep 30 10:10:20 crc kubenswrapper[4730]: I0930 10:10:20.848678 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"650a5547d9dd1cacb15881b37bae0f325aaa0954de7ada7117747331e9949af8"} err="failed to get container status \"650a5547d9dd1cacb15881b37bae0f325aaa0954de7ada7117747331e9949af8\": rpc error: code = NotFound desc = could not find container \"650a5547d9dd1cacb15881b37bae0f325aaa0954de7ada7117747331e9949af8\": container with ID starting with 650a5547d9dd1cacb15881b37bae0f325aaa0954de7ada7117747331e9949af8 not found: ID does not exist"
Sep 30 10:10:20 crc kubenswrapper[4730]: I0930 10:10:20.913105 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a503514f-2964-4836-836d-8987de4f1828-public-tls-certs\") pod \"nova-api-0\" (UID: \"a503514f-2964-4836-836d-8987de4f1828\") " pod="openstack/nova-api-0"
Sep 30 10:10:20 crc kubenswrapper[4730]: I0930 10:10:20.913622 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a503514f-2964-4836-836d-8987de4f1828-logs\") pod \"nova-api-0\" (UID: \"a503514f-2964-4836-836d-8987de4f1828\") " pod="openstack/nova-api-0"
Sep 30 10:10:20 crc kubenswrapper[4730]: I0930 10:10:20.913698 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a503514f-2964-4836-836d-8987de4f1828-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"a503514f-2964-4836-836d-8987de4f1828\") " pod="openstack/nova-api-0"
Sep 30 10:10:20 crc kubenswrapper[4730]: I0930 10:10:20.913725 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a503514f-2964-4836-836d-8987de4f1828-internal-tls-certs\") pod \"nova-api-0\" (UID: \"a503514f-2964-4836-836d-8987de4f1828\") " pod="openstack/nova-api-0"
Sep 30 10:10:20 crc kubenswrapper[4730]: I0930 10:10:20.913792 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q78lp\" (UniqueName: \"kubernetes.io/projected/a503514f-2964-4836-836d-8987de4f1828-kube-api-access-q78lp\") pod \"nova-api-0\" (UID: \"a503514f-2964-4836-836d-8987de4f1828\") " pod="openstack/nova-api-0"
Sep 30 10:10:20 crc kubenswrapper[4730]: I0930 10:10:20.913904 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a503514f-2964-4836-836d-8987de4f1828-config-data\") pod \"nova-api-0\" (UID: \"a503514f-2964-4836-836d-8987de4f1828\") " pod="openstack/nova-api-0"
Sep 30 10:10:21 crc kubenswrapper[4730]: I0930 10:10:21.015387 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a503514f-2964-4836-836d-8987de4f1828-logs\") pod \"nova-api-0\" (UID: \"a503514f-2964-4836-836d-8987de4f1828\") " pod="openstack/nova-api-0"
Sep 30 10:10:21 crc kubenswrapper[4730]: I0930 10:10:21.015438 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a503514f-2964-4836-836d-8987de4f1828-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"a503514f-2964-4836-836d-8987de4f1828\") " pod="openstack/nova-api-0"
Sep 30 10:10:21 crc kubenswrapper[4730]: I0930 10:10:21.015454 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a503514f-2964-4836-836d-8987de4f1828-internal-tls-certs\") pod \"nova-api-0\" (UID: \"a503514f-2964-4836-836d-8987de4f1828\") " pod="openstack/nova-api-0"
Sep 30 10:10:21 crc kubenswrapper[4730]: I0930 10:10:21.015481 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q78lp\" (UniqueName: \"kubernetes.io/projected/a503514f-2964-4836-836d-8987de4f1828-kube-api-access-q78lp\") pod \"nova-api-0\" (UID: \"a503514f-2964-4836-836d-8987de4f1828\") " pod="openstack/nova-api-0"
Sep 30 10:10:21 crc kubenswrapper[4730]: I0930 10:10:21.015509 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a503514f-2964-4836-836d-8987de4f1828-config-data\") pod \"nova-api-0\" (UID: \"a503514f-2964-4836-836d-8987de4f1828\") " pod="openstack/nova-api-0"
Sep 30 10:10:21 crc kubenswrapper[4730]: I0930 10:10:21.015587 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a503514f-2964-4836-836d-8987de4f1828-public-tls-certs\") pod \"nova-api-0\" (UID: \"a503514f-2964-4836-836d-8987de4f1828\") " pod="openstack/nova-api-0"
Sep 30 10:10:21 crc kubenswrapper[4730]: I0930 10:10:21.016318 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a503514f-2964-4836-836d-8987de4f1828-logs\") pod \"nova-api-0\" (UID: \"a503514f-2964-4836-836d-8987de4f1828\") " pod="openstack/nova-api-0"
Sep 30 10:10:21 crc kubenswrapper[4730]: I0930 10:10:21.019251 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a503514f-2964-4836-836d-8987de4f1828-internal-tls-certs\") pod \"nova-api-0\" (UID: \"a503514f-2964-4836-836d-8987de4f1828\") " pod="openstack/nova-api-0"
Sep 30 10:10:21 crc kubenswrapper[4730]: I0930 10:10:21.019403 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a503514f-2964-4836-836d-8987de4f1828-config-data\") pod \"nova-api-0\" (UID: \"a503514f-2964-4836-836d-8987de4f1828\") " pod="openstack/nova-api-0"
Sep 30 10:10:21 crc kubenswrapper[4730]: I0930 10:10:21.019865 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a503514f-2964-4836-836d-8987de4f1828-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"a503514f-2964-4836-836d-8987de4f1828\") " pod="openstack/nova-api-0"
Sep 30 10:10:21 crc kubenswrapper[4730]: I0930 10:10:21.020423 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a503514f-2964-4836-836d-8987de4f1828-public-tls-certs\") pod \"nova-api-0\" (UID: \"a503514f-2964-4836-836d-8987de4f1828\") " pod="openstack/nova-api-0"
Sep 30 10:10:21 crc kubenswrapper[4730]: I0930 10:10:21.034084 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q78lp\" (UniqueName: \"kubernetes.io/projected/a503514f-2964-4836-836d-8987de4f1828-kube-api-access-q78lp\") pod \"nova-api-0\" (UID: \"a503514f-2964-4836-836d-8987de4f1828\") " pod="openstack/nova-api-0"
Sep 30 10:10:21 crc kubenswrapper[4730]: I0930 10:10:21.121257 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Sep 30 10:10:21 crc kubenswrapper[4730]: I0930 10:10:21.343704 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0"
Sep 30 10:10:21 crc kubenswrapper[4730]: I0930 10:10:21.574291 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Sep 30 10:10:21 crc kubenswrapper[4730]: I0930 10:10:21.741393 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"a503514f-2964-4836-836d-8987de4f1828","Type":"ContainerStarted","Data":"2223d980b1ee911c35f4fa58fd44432808629e857906ec263a47815424a55f32"}
Sep 30 10:10:21 crc kubenswrapper[4730]: I0930 10:10:21.741438 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"a503514f-2964-4836-836d-8987de4f1828","Type":"ContainerStarted","Data":"491d54791661faef63bec2c693ed59180fc1c46811541cc0457a82e4d926a299"}
Sep 30 10:10:22 crc kubenswrapper[4730]: I0930 10:10:22.391031 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3d1e9147-373e-4cfc-a181-eaf5e9d75e94" path="/var/lib/kubelet/pods/3d1e9147-373e-4cfc-a181-eaf5e9d75e94/volumes"
Sep 30 10:10:22 crc kubenswrapper[4730]: I0930 10:10:22.755638 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"a503514f-2964-4836-836d-8987de4f1828","Type":"ContainerStarted","Data":"065aef1bbe53169e007bb6066d696bcf175aef85a227702280a17d958d7846e4"}
Sep 30 10:10:22 crc kubenswrapper[4730]: I0930 10:10:22.783316 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.7832957179999998 podStartE2EDuration="2.783295718s" podCreationTimestamp="2025-09-30 10:10:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 10:10:22.773500501 +0000 UTC m=+1267.106760504" watchObservedRunningTime="2025-09-30 10:10:22.783295718 +0000 UTC m=+1267.116555711"
Sep 30 10:10:24 crc kubenswrapper[4730]: I0930 10:10:24.480234 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Sep 30 10:10:24 crc kubenswrapper[4730]: I0930 10:10:24.596257 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/994beb56-6886-40ad-88f9-98e66828cafd-scripts\") pod \"994beb56-6886-40ad-88f9-98e66828cafd\" (UID: \"994beb56-6886-40ad-88f9-98e66828cafd\") "
Sep 30 10:10:24 crc kubenswrapper[4730]: I0930 10:10:24.596555 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/994beb56-6886-40ad-88f9-98e66828cafd-log-httpd\") pod \"994beb56-6886-40ad-88f9-98e66828cafd\" (UID: \"994beb56-6886-40ad-88f9-98e66828cafd\") "
Sep 30 10:10:24 crc kubenswrapper[4730]: I0930 10:10:24.596700 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/994beb56-6886-40ad-88f9-98e66828cafd-ceilometer-tls-certs\") pod \"994beb56-6886-40ad-88f9-98e66828cafd\" (UID: \"994beb56-6886-40ad-88f9-98e66828cafd\") "
Sep 30 10:10:24 crc kubenswrapper[4730]: I0930 10:10:24.596839 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/994beb56-6886-40ad-88f9-98e66828cafd-sg-core-conf-yaml\") pod \"994beb56-6886-40ad-88f9-98e66828cafd\" (UID: \"994beb56-6886-40ad-88f9-98e66828cafd\") "
Sep 30 10:10:24 crc kubenswrapper[4730]: I0930 10:10:24.596951 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tcmbh\" (UniqueName: \"kubernetes.io/projected/994beb56-6886-40ad-88f9-98e66828cafd-kube-api-access-tcmbh\") pod \"994beb56-6886-40ad-88f9-98e66828cafd\" (UID: \"994beb56-6886-40ad-88f9-98e66828cafd\") "
Sep 30 10:10:24 crc kubenswrapper[4730]: I0930 10:10:24.597042 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/994beb56-6886-40ad-88f9-98e66828cafd-combined-ca-bundle\") pod \"994beb56-6886-40ad-88f9-98e66828cafd\" (UID: \"994beb56-6886-40ad-88f9-98e66828cafd\") "
Sep 30 10:10:24 crc kubenswrapper[4730]: I0930 10:10:24.597144 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/994beb56-6886-40ad-88f9-98e66828cafd-run-httpd\") pod \"994beb56-6886-40ad-88f9-98e66828cafd\" (UID: \"994beb56-6886-40ad-88f9-98e66828cafd\") "
Sep 30 10:10:24 crc kubenswrapper[4730]: I0930 10:10:24.597311 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/994beb56-6886-40ad-88f9-98e66828cafd-config-data\") pod \"994beb56-6886-40ad-88f9-98e66828cafd\" (UID: \"994beb56-6886-40ad-88f9-98e66828cafd\") "
Sep 30 10:10:24 crc kubenswrapper[4730]: I0930 10:10:24.599116 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/994beb56-6886-40ad-88f9-98e66828cafd-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "994beb56-6886-40ad-88f9-98e66828cafd" (UID: "994beb56-6886-40ad-88f9-98e66828cafd"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 10:10:24 crc kubenswrapper[4730]: I0930 10:10:24.599332 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/994beb56-6886-40ad-88f9-98e66828cafd-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "994beb56-6886-40ad-88f9-98e66828cafd" (UID: "994beb56-6886-40ad-88f9-98e66828cafd"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 10:10:24 crc kubenswrapper[4730]: I0930 10:10:24.616097 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/994beb56-6886-40ad-88f9-98e66828cafd-kube-api-access-tcmbh" (OuterVolumeSpecName: "kube-api-access-tcmbh") pod "994beb56-6886-40ad-88f9-98e66828cafd" (UID: "994beb56-6886-40ad-88f9-98e66828cafd"). InnerVolumeSpecName "kube-api-access-tcmbh". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 10:10:24 crc kubenswrapper[4730]: I0930 10:10:24.616471 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/994beb56-6886-40ad-88f9-98e66828cafd-scripts" (OuterVolumeSpecName: "scripts") pod "994beb56-6886-40ad-88f9-98e66828cafd" (UID: "994beb56-6886-40ad-88f9-98e66828cafd"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 10:10:24 crc kubenswrapper[4730]: I0930 10:10:24.644359 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/994beb56-6886-40ad-88f9-98e66828cafd-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "994beb56-6886-40ad-88f9-98e66828cafd" (UID: "994beb56-6886-40ad-88f9-98e66828cafd"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 10:10:24 crc kubenswrapper[4730]: I0930 10:10:24.656055 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/994beb56-6886-40ad-88f9-98e66828cafd-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "994beb56-6886-40ad-88f9-98e66828cafd" (UID: "994beb56-6886-40ad-88f9-98e66828cafd"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 10:10:24 crc kubenswrapper[4730]: I0930 10:10:24.681271 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/994beb56-6886-40ad-88f9-98e66828cafd-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "994beb56-6886-40ad-88f9-98e66828cafd" (UID: "994beb56-6886-40ad-88f9-98e66828cafd"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 10:10:24 crc kubenswrapper[4730]: I0930 10:10:24.699698 4730 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/994beb56-6886-40ad-88f9-98e66828cafd-scripts\") on node \"crc\" DevicePath \"\""
Sep 30 10:10:24 crc kubenswrapper[4730]: I0930 10:10:24.699844 4730 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/994beb56-6886-40ad-88f9-98e66828cafd-log-httpd\") on node \"crc\" DevicePath \"\""
Sep 30 10:10:24 crc kubenswrapper[4730]: I0930 10:10:24.699876 4730 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/994beb56-6886-40ad-88f9-98e66828cafd-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\""
Sep 30 10:10:24 crc kubenswrapper[4730]: I0930 10:10:24.699897 4730 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/994beb56-6886-40ad-88f9-98e66828cafd-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\""
Sep 30 10:10:24 crc kubenswrapper[4730]: I0930 10:10:24.699914 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tcmbh\" (UniqueName: \"kubernetes.io/projected/994beb56-6886-40ad-88f9-98e66828cafd-kube-api-access-tcmbh\") on node \"crc\" DevicePath \"\""
Sep 30 10:10:24 crc kubenswrapper[4730]: I0930 10:10:24.699929 4730 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/994beb56-6886-40ad-88f9-98e66828cafd-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 30 10:10:24 crc kubenswrapper[4730]: I0930 10:10:24.699944 4730 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/994beb56-6886-40ad-88f9-98e66828cafd-run-httpd\") on node \"crc\" DevicePath \"\""
Sep 30 10:10:24 crc kubenswrapper[4730]: I0930 10:10:24.700700 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/994beb56-6886-40ad-88f9-98e66828cafd-config-data" (OuterVolumeSpecName: "config-data") pod "994beb56-6886-40ad-88f9-98e66828cafd" (UID: "994beb56-6886-40ad-88f9-98e66828cafd"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 10:10:24 crc kubenswrapper[4730]: I0930 10:10:24.792402 4730 generic.go:334] "Generic (PLEG): container finished" podID="994beb56-6886-40ad-88f9-98e66828cafd" containerID="26c3f8acd99c58dd7c7ffa2e39768b3752aa2e2e49987dcdbe3d46a3d76d528b" exitCode=0
Sep 30 10:10:24 crc kubenswrapper[4730]: I0930 10:10:24.792446 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"994beb56-6886-40ad-88f9-98e66828cafd","Type":"ContainerDied","Data":"26c3f8acd99c58dd7c7ffa2e39768b3752aa2e2e49987dcdbe3d46a3d76d528b"}
Sep 30 10:10:24 crc kubenswrapper[4730]: I0930 10:10:24.792469 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"994beb56-6886-40ad-88f9-98e66828cafd","Type":"ContainerDied","Data":"621737f982e8b581cc91b698cfbf5fbae1ccd0deef2c4bd6066bb48d9a736d0f"}
Sep 30 10:10:24 crc kubenswrapper[4730]: I0930 10:10:24.792484 4730 scope.go:117] "RemoveContainer" containerID="761137ad28d52bd9509b460a9ac82ad2ccb2003086aa456fd838e6219e521dac"
Sep 30 10:10:24 crc kubenswrapper[4730]: I0930 10:10:24.792597 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Sep 30 10:10:24 crc kubenswrapper[4730]: I0930 10:10:24.802142 4730 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/994beb56-6886-40ad-88f9-98e66828cafd-config-data\") on node \"crc\" DevicePath \"\""
Sep 30 10:10:24 crc kubenswrapper[4730]: I0930 10:10:24.826806 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Sep 30 10:10:24 crc kubenswrapper[4730]: I0930 10:10:24.854204 4730 scope.go:117] "RemoveContainer" containerID="7ed151c7840c898d7d2eb7e101da3a438c14fec51d25187a09a03713b50ca041"
Sep 30 10:10:24 crc kubenswrapper[4730]: I0930 10:10:24.855492 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"]
Sep 30 10:10:24 crc kubenswrapper[4730]: I0930 10:10:24.871114 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"]
Sep 30 10:10:24 crc kubenswrapper[4730]: E0930 10:10:24.871682 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="994beb56-6886-40ad-88f9-98e66828cafd" containerName="proxy-httpd"
Sep 30 10:10:24 crc kubenswrapper[4730]: I0930 10:10:24.871706 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="994beb56-6886-40ad-88f9-98e66828cafd" containerName="proxy-httpd"
Sep 30 10:10:24 crc kubenswrapper[4730]: E0930 10:10:24.871744 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="994beb56-6886-40ad-88f9-98e66828cafd" containerName="ceilometer-central-agent"
Sep 30 10:10:24 crc kubenswrapper[4730]: I0930 10:10:24.871755 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="994beb56-6886-40ad-88f9-98e66828cafd" containerName="ceilometer-central-agent"
Sep 30 10:10:24 crc kubenswrapper[4730]: E0930 10:10:24.871768 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="994beb56-6886-40ad-88f9-98e66828cafd" containerName="sg-core"
Sep 30 10:10:24 crc kubenswrapper[4730]: I0930 10:10:24.871776 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="994beb56-6886-40ad-88f9-98e66828cafd" containerName="sg-core"
Sep 30 10:10:24 crc kubenswrapper[4730]: E0930 10:10:24.871812 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="994beb56-6886-40ad-88f9-98e66828cafd" containerName="ceilometer-notification-agent"
Sep 30 10:10:24 crc kubenswrapper[4730]: I0930 10:10:24.871822 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="994beb56-6886-40ad-88f9-98e66828cafd" containerName="ceilometer-notification-agent"
Sep 30 10:10:24 crc kubenswrapper[4730]: I0930 10:10:24.872049 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="994beb56-6886-40ad-88f9-98e66828cafd" containerName="ceilometer-central-agent"
Sep 30 10:10:24 crc kubenswrapper[4730]: I0930 10:10:24.872081 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="994beb56-6886-40ad-88f9-98e66828cafd" containerName="proxy-httpd"
Sep 30 10:10:24 crc kubenswrapper[4730]: I0930 10:10:24.872103 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="994beb56-6886-40ad-88f9-98e66828cafd" containerName="sg-core"
Sep 30 10:10:24 crc kubenswrapper[4730]: I0930 10:10:24.872117 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="994beb56-6886-40ad-88f9-98e66828cafd" containerName="ceilometer-notification-agent"
Sep 30 10:10:24 crc kubenswrapper[4730]: I0930 10:10:24.874799 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 30 10:10:24 crc kubenswrapper[4730]: I0930 10:10:24.877319 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Sep 30 10:10:24 crc kubenswrapper[4730]: I0930 10:10:24.877516 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Sep 30 10:10:24 crc kubenswrapper[4730]: I0930 10:10:24.878401 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Sep 30 10:10:24 crc kubenswrapper[4730]: I0930 10:10:24.881905 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 30 10:10:24 crc kubenswrapper[4730]: I0930 10:10:24.893499 4730 scope.go:117] "RemoveContainer" containerID="26c3f8acd99c58dd7c7ffa2e39768b3752aa2e2e49987dcdbe3d46a3d76d528b" Sep 30 10:10:24 crc kubenswrapper[4730]: I0930 10:10:24.923390 4730 scope.go:117] "RemoveContainer" containerID="f314f1f1ee72b0424cc3d973d3a7162d6cab86d40721a9a6d85d1bf05ee6225d" Sep 30 10:10:24 crc kubenswrapper[4730]: I0930 10:10:24.947306 4730 scope.go:117] "RemoveContainer" containerID="761137ad28d52bd9509b460a9ac82ad2ccb2003086aa456fd838e6219e521dac" Sep 30 10:10:24 crc kubenswrapper[4730]: E0930 10:10:24.947867 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"761137ad28d52bd9509b460a9ac82ad2ccb2003086aa456fd838e6219e521dac\": container with ID starting with 761137ad28d52bd9509b460a9ac82ad2ccb2003086aa456fd838e6219e521dac not found: ID does not exist" containerID="761137ad28d52bd9509b460a9ac82ad2ccb2003086aa456fd838e6219e521dac" Sep 30 10:10:24 crc kubenswrapper[4730]: I0930 10:10:24.947909 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"761137ad28d52bd9509b460a9ac82ad2ccb2003086aa456fd838e6219e521dac"} err="failed to get container status \"761137ad28d52bd9509b460a9ac82ad2ccb2003086aa456fd838e6219e521dac\": rpc error: code = NotFound desc = could not find container \"761137ad28d52bd9509b460a9ac82ad2ccb2003086aa456fd838e6219e521dac\": container with ID starting with 761137ad28d52bd9509b460a9ac82ad2ccb2003086aa456fd838e6219e521dac not found: ID does not exist" Sep 30 10:10:24 crc kubenswrapper[4730]: I0930 10:10:24.947949 4730 scope.go:117] "RemoveContainer" containerID="7ed151c7840c898d7d2eb7e101da3a438c14fec51d25187a09a03713b50ca041" Sep 30 10:10:24 crc kubenswrapper[4730]: E0930 10:10:24.948334 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7ed151c7840c898d7d2eb7e101da3a438c14fec51d25187a09a03713b50ca041\": container with ID starting with 7ed151c7840c898d7d2eb7e101da3a438c14fec51d25187a09a03713b50ca041 not found: ID does not exist" containerID="7ed151c7840c898d7d2eb7e101da3a438c14fec51d25187a09a03713b50ca041" Sep 30 10:10:24 crc kubenswrapper[4730]: I0930 10:10:24.948438 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7ed151c7840c898d7d2eb7e101da3a438c14fec51d25187a09a03713b50ca041"} err="failed to get container status \"7ed151c7840c898d7d2eb7e101da3a438c14fec51d25187a09a03713b50ca041\": rpc error: code = NotFound desc = could not find container \"7ed151c7840c898d7d2eb7e101da3a438c14fec51d25187a09a03713b50ca041\": container with ID starting with 7ed151c7840c898d7d2eb7e101da3a438c14fec51d25187a09a03713b50ca041 not found: ID does not exist" Sep 30 10:10:24 
crc kubenswrapper[4730]: I0930 10:10:24.948532 4730 scope.go:117] "RemoveContainer" containerID="26c3f8acd99c58dd7c7ffa2e39768b3752aa2e2e49987dcdbe3d46a3d76d528b" Sep 30 10:10:24 crc kubenswrapper[4730]: E0930 10:10:24.948988 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"26c3f8acd99c58dd7c7ffa2e39768b3752aa2e2e49987dcdbe3d46a3d76d528b\": container with ID starting with 26c3f8acd99c58dd7c7ffa2e39768b3752aa2e2e49987dcdbe3d46a3d76d528b not found: ID does not exist" containerID="26c3f8acd99c58dd7c7ffa2e39768b3752aa2e2e49987dcdbe3d46a3d76d528b" Sep 30 10:10:24 crc kubenswrapper[4730]: I0930 10:10:24.949035 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"26c3f8acd99c58dd7c7ffa2e39768b3752aa2e2e49987dcdbe3d46a3d76d528b"} err="failed to get container status \"26c3f8acd99c58dd7c7ffa2e39768b3752aa2e2e49987dcdbe3d46a3d76d528b\": rpc error: code = NotFound desc = could not find container \"26c3f8acd99c58dd7c7ffa2e39768b3752aa2e2e49987dcdbe3d46a3d76d528b\": container with ID starting with 26c3f8acd99c58dd7c7ffa2e39768b3752aa2e2e49987dcdbe3d46a3d76d528b not found: ID does not exist" Sep 30 10:10:24 crc kubenswrapper[4730]: I0930 10:10:24.949049 4730 scope.go:117] "RemoveContainer" containerID="f314f1f1ee72b0424cc3d973d3a7162d6cab86d40721a9a6d85d1bf05ee6225d" Sep 30 10:10:24 crc kubenswrapper[4730]: E0930 10:10:24.949317 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f314f1f1ee72b0424cc3d973d3a7162d6cab86d40721a9a6d85d1bf05ee6225d\": container with ID starting with f314f1f1ee72b0424cc3d973d3a7162d6cab86d40721a9a6d85d1bf05ee6225d not found: ID does not exist" containerID="f314f1f1ee72b0424cc3d973d3a7162d6cab86d40721a9a6d85d1bf05ee6225d" Sep 30 10:10:24 crc kubenswrapper[4730]: I0930 10:10:24.949410 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f314f1f1ee72b0424cc3d973d3a7162d6cab86d40721a9a6d85d1bf05ee6225d"} err="failed to get container status \"f314f1f1ee72b0424cc3d973d3a7162d6cab86d40721a9a6d85d1bf05ee6225d\": rpc error: code = NotFound desc = could not find container \"f314f1f1ee72b0424cc3d973d3a7162d6cab86d40721a9a6d85d1bf05ee6225d\": container with ID starting with f314f1f1ee72b0424cc3d973d3a7162d6cab86d40721a9a6d85d1bf05ee6225d not found: ID does not exist" Sep 30 10:10:25 crc kubenswrapper[4730]: I0930 10:10:25.010420 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/4eb0bfb8-0c6a-4d5f-9b85-e06888511203-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"4eb0bfb8-0c6a-4d5f-9b85-e06888511203\") " pod="openstack/ceilometer-0" Sep 30 10:10:25 crc kubenswrapper[4730]: I0930 10:10:25.010474 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4wktm\" (UniqueName: \"kubernetes.io/projected/4eb0bfb8-0c6a-4d5f-9b85-e06888511203-kube-api-access-4wktm\") pod \"ceilometer-0\" (UID: \"4eb0bfb8-0c6a-4d5f-9b85-e06888511203\") " pod="openstack/ceilometer-0" Sep 30 10:10:25 crc kubenswrapper[4730]: I0930 10:10:25.010518 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4eb0bfb8-0c6a-4d5f-9b85-e06888511203-run-httpd\") pod \"ceilometer-0\" (UID: 
\"4eb0bfb8-0c6a-4d5f-9b85-e06888511203\") " pod="openstack/ceilometer-0" Sep 30 10:10:25 crc kubenswrapper[4730]: I0930 10:10:25.010730 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4eb0bfb8-0c6a-4d5f-9b85-e06888511203-config-data\") pod \"ceilometer-0\" (UID: \"4eb0bfb8-0c6a-4d5f-9b85-e06888511203\") " pod="openstack/ceilometer-0" Sep 30 10:10:25 crc kubenswrapper[4730]: I0930 10:10:25.010877 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4eb0bfb8-0c6a-4d5f-9b85-e06888511203-scripts\") pod \"ceilometer-0\" (UID: \"4eb0bfb8-0c6a-4d5f-9b85-e06888511203\") " pod="openstack/ceilometer-0" Sep 30 10:10:25 crc kubenswrapper[4730]: I0930 10:10:25.010936 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/4eb0bfb8-0c6a-4d5f-9b85-e06888511203-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"4eb0bfb8-0c6a-4d5f-9b85-e06888511203\") " pod="openstack/ceilometer-0" Sep 30 10:10:25 crc kubenswrapper[4730]: I0930 10:10:25.011085 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4eb0bfb8-0c6a-4d5f-9b85-e06888511203-log-httpd\") pod \"ceilometer-0\" (UID: \"4eb0bfb8-0c6a-4d5f-9b85-e06888511203\") " pod="openstack/ceilometer-0" Sep 30 10:10:25 crc kubenswrapper[4730]: I0930 10:10:25.011229 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4eb0bfb8-0c6a-4d5f-9b85-e06888511203-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"4eb0bfb8-0c6a-4d5f-9b85-e06888511203\") " pod="openstack/ceilometer-0" Sep 30 10:10:25 crc kubenswrapper[4730]: I0930 10:10:25.112983 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4eb0bfb8-0c6a-4d5f-9b85-e06888511203-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"4eb0bfb8-0c6a-4d5f-9b85-e06888511203\") " pod="openstack/ceilometer-0" Sep 30 10:10:25 crc kubenswrapper[4730]: I0930 10:10:25.113066 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/4eb0bfb8-0c6a-4d5f-9b85-e06888511203-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"4eb0bfb8-0c6a-4d5f-9b85-e06888511203\") " pod="openstack/ceilometer-0" Sep 30 10:10:25 crc kubenswrapper[4730]: I0930 10:10:25.113090 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4wktm\" (UniqueName: \"kubernetes.io/projected/4eb0bfb8-0c6a-4d5f-9b85-e06888511203-kube-api-access-4wktm\") pod \"ceilometer-0\" (UID: \"4eb0bfb8-0c6a-4d5f-9b85-e06888511203\") " pod="openstack/ceilometer-0" Sep 30 10:10:25 crc kubenswrapper[4730]: I0930 10:10:25.113126 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4eb0bfb8-0c6a-4d5f-9b85-e06888511203-run-httpd\") pod \"ceilometer-0\" (UID: \"4eb0bfb8-0c6a-4d5f-9b85-e06888511203\") " pod="openstack/ceilometer-0" Sep 30 10:10:25 crc kubenswrapper[4730]: I0930 10:10:25.113177 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/4eb0bfb8-0c6a-4d5f-9b85-e06888511203-config-data\") pod \"ceilometer-0\" (UID: \"4eb0bfb8-0c6a-4d5f-9b85-e06888511203\") " pod="openstack/ceilometer-0" Sep 30 10:10:25 crc kubenswrapper[4730]: I0930 10:10:25.113216 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4eb0bfb8-0c6a-4d5f-9b85-e06888511203-scripts\") pod \"ceilometer-0\" (UID: \"4eb0bfb8-0c6a-4d5f-9b85-e06888511203\") " pod="openstack/ceilometer-0" Sep 30 10:10:25 crc kubenswrapper[4730]: I0930 10:10:25.113245 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/4eb0bfb8-0c6a-4d5f-9b85-e06888511203-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"4eb0bfb8-0c6a-4d5f-9b85-e06888511203\") " pod="openstack/ceilometer-0" Sep 30 10:10:25 crc kubenswrapper[4730]: I0930 10:10:25.113605 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4eb0bfb8-0c6a-4d5f-9b85-e06888511203-run-httpd\") pod \"ceilometer-0\" (UID: \"4eb0bfb8-0c6a-4d5f-9b85-e06888511203\") " pod="openstack/ceilometer-0" Sep 30 10:10:25 crc kubenswrapper[4730]: I0930 10:10:25.113722 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4eb0bfb8-0c6a-4d5f-9b85-e06888511203-log-httpd\") pod \"ceilometer-0\" (UID: \"4eb0bfb8-0c6a-4d5f-9b85-e06888511203\") " pod="openstack/ceilometer-0" Sep 30 10:10:25 crc kubenswrapper[4730]: I0930 10:10:25.113941 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4eb0bfb8-0c6a-4d5f-9b85-e06888511203-log-httpd\") pod \"ceilometer-0\" (UID: \"4eb0bfb8-0c6a-4d5f-9b85-e06888511203\") " pod="openstack/ceilometer-0" Sep 30 10:10:25 crc kubenswrapper[4730]: I0930 10:10:25.117067 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4eb0bfb8-0c6a-4d5f-9b85-e06888511203-config-data\") pod \"ceilometer-0\" (UID: \"4eb0bfb8-0c6a-4d5f-9b85-e06888511203\") " pod="openstack/ceilometer-0" Sep 30 10:10:25 crc kubenswrapper[4730]: I0930 10:10:25.117339 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4eb0bfb8-0c6a-4d5f-9b85-e06888511203-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"4eb0bfb8-0c6a-4d5f-9b85-e06888511203\") " pod="openstack/ceilometer-0" Sep 30 10:10:25 crc kubenswrapper[4730]: I0930 10:10:25.125702 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/4eb0bfb8-0c6a-4d5f-9b85-e06888511203-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"4eb0bfb8-0c6a-4d5f-9b85-e06888511203\") " pod="openstack/ceilometer-0" Sep 30 10:10:25 crc kubenswrapper[4730]: I0930 10:10:25.126906 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/4eb0bfb8-0c6a-4d5f-9b85-e06888511203-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"4eb0bfb8-0c6a-4d5f-9b85-e06888511203\") " pod="openstack/ceilometer-0" Sep 30 10:10:25 crc kubenswrapper[4730]: I0930 10:10:25.127785 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4eb0bfb8-0c6a-4d5f-9b85-e06888511203-scripts\") pod 
\"ceilometer-0\" (UID: \"4eb0bfb8-0c6a-4d5f-9b85-e06888511203\") " pod="openstack/ceilometer-0" Sep 30 10:10:25 crc kubenswrapper[4730]: I0930 10:10:25.128092 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4wktm\" (UniqueName: \"kubernetes.io/projected/4eb0bfb8-0c6a-4d5f-9b85-e06888511203-kube-api-access-4wktm\") pod \"ceilometer-0\" (UID: \"4eb0bfb8-0c6a-4d5f-9b85-e06888511203\") " pod="openstack/ceilometer-0" Sep 30 10:10:25 crc kubenswrapper[4730]: I0930 10:10:25.206534 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 10:10:25 crc kubenswrapper[4730]: I0930 10:10:25.628129 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 30 10:10:25 crc kubenswrapper[4730]: I0930 10:10:25.802837 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4eb0bfb8-0c6a-4d5f-9b85-e06888511203","Type":"ContainerStarted","Data":"c1355aa2edd02fc2b861dd15a01fdf2656bd07bcc10abfd50c53e5c8010b1f83"} Sep 30 10:10:26 crc kubenswrapper[4730]: I0930 10:10:26.219714 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5fb4859d7f-dw2rw" Sep 30 10:10:26 crc kubenswrapper[4730]: I0930 10:10:26.319714 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6dfdd9cfdc-s9sqb"] Sep 30 10:10:26 crc kubenswrapper[4730]: I0930 10:10:26.319951 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6dfdd9cfdc-s9sqb" podUID="64067ae9-7904-4133-9c1f-f09a64dd3209" containerName="dnsmasq-dns" containerID="cri-o://b369be31d719c6ab268dc519c85726e61f2d1a1f8e2769006f34e26002ae7c98" gracePeriod=10 Sep 30 10:10:26 crc kubenswrapper[4730]: I0930 10:10:26.346370 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Sep 30 10:10:26 crc kubenswrapper[4730]: I0930 10:10:26.402919 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="994beb56-6886-40ad-88f9-98e66828cafd" path="/var/lib/kubelet/pods/994beb56-6886-40ad-88f9-98e66828cafd/volumes" Sep 30 10:10:26 crc kubenswrapper[4730]: I0930 10:10:26.403674 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Sep 30 10:10:26 crc kubenswrapper[4730]: I0930 10:10:26.817135 4730 generic.go:334] "Generic (PLEG): container finished" podID="64067ae9-7904-4133-9c1f-f09a64dd3209" containerID="b369be31d719c6ab268dc519c85726e61f2d1a1f8e2769006f34e26002ae7c98" exitCode=0 Sep 30 10:10:26 crc kubenswrapper[4730]: I0930 10:10:26.817436 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6dfdd9cfdc-s9sqb" event={"ID":"64067ae9-7904-4133-9c1f-f09a64dd3209","Type":"ContainerDied","Data":"b369be31d719c6ab268dc519c85726e61f2d1a1f8e2769006f34e26002ae7c98"} Sep 30 10:10:26 crc kubenswrapper[4730]: I0930 10:10:26.817486 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6dfdd9cfdc-s9sqb" event={"ID":"64067ae9-7904-4133-9c1f-f09a64dd3209","Type":"ContainerDied","Data":"83fac0ba688303ed39ea4bbd1dd8314a3deecf85c8a481e6289848d3cee1e9bc"} Sep 30 10:10:26 crc kubenswrapper[4730]: I0930 10:10:26.817500 4730 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="83fac0ba688303ed39ea4bbd1dd8314a3deecf85c8a481e6289848d3cee1e9bc" Sep 30 10:10:26 crc kubenswrapper[4730]: I0930 10:10:26.819693 4730 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4eb0bfb8-0c6a-4d5f-9b85-e06888511203","Type":"ContainerStarted","Data":"f3d88db48035b37bfdb31c723da818292dbf9c48a773b3dda9c933d8e369ce89"} Sep 30 10:10:26 crc kubenswrapper[4730]: I0930 10:10:26.819751 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4eb0bfb8-0c6a-4d5f-9b85-e06888511203","Type":"ContainerStarted","Data":"94923d839e536c582c3a7566c9c03cec95370b67e9aeef4324b74b142aab2241"} Sep 30 10:10:26 crc kubenswrapper[4730]: I0930 10:10:26.827407 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6dfdd9cfdc-s9sqb" Sep 30 10:10:26 crc kubenswrapper[4730]: I0930 10:10:26.836998 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0" Sep 30 10:10:26 crc kubenswrapper[4730]: I0930 10:10:26.959156 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/64067ae9-7904-4133-9c1f-f09a64dd3209-ovsdbserver-nb\") pod \"64067ae9-7904-4133-9c1f-f09a64dd3209\" (UID: \"64067ae9-7904-4133-9c1f-f09a64dd3209\") " Sep 30 10:10:26 crc kubenswrapper[4730]: I0930 10:10:26.959202 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/64067ae9-7904-4133-9c1f-f09a64dd3209-dns-svc\") pod \"64067ae9-7904-4133-9c1f-f09a64dd3209\" (UID: \"64067ae9-7904-4133-9c1f-f09a64dd3209\") " Sep 30 10:10:26 crc kubenswrapper[4730]: I0930 10:10:26.959250 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6qcwm\" (UniqueName: \"kubernetes.io/projected/64067ae9-7904-4133-9c1f-f09a64dd3209-kube-api-access-6qcwm\") pod \"64067ae9-7904-4133-9c1f-f09a64dd3209\" (UID: \"64067ae9-7904-4133-9c1f-f09a64dd3209\") " Sep 30 10:10:26 crc kubenswrapper[4730]: I0930 10:10:26.959290 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/64067ae9-7904-4133-9c1f-f09a64dd3209-ovsdbserver-sb\") pod \"64067ae9-7904-4133-9c1f-f09a64dd3209\" (UID: \"64067ae9-7904-4133-9c1f-f09a64dd3209\") " Sep 30 10:10:26 crc kubenswrapper[4730]: I0930 10:10:26.959551 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/64067ae9-7904-4133-9c1f-f09a64dd3209-config\") pod \"64067ae9-7904-4133-9c1f-f09a64dd3209\" (UID: \"64067ae9-7904-4133-9c1f-f09a64dd3209\") " Sep 30 10:10:26 crc kubenswrapper[4730]: I0930 10:10:26.966915 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/64067ae9-7904-4133-9c1f-f09a64dd3209-kube-api-access-6qcwm" (OuterVolumeSpecName: "kube-api-access-6qcwm") pod "64067ae9-7904-4133-9c1f-f09a64dd3209" (UID: "64067ae9-7904-4133-9c1f-f09a64dd3209"). InnerVolumeSpecName "kube-api-access-6qcwm". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:10:27 crc kubenswrapper[4730]: I0930 10:10:27.032022 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/64067ae9-7904-4133-9c1f-f09a64dd3209-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "64067ae9-7904-4133-9c1f-f09a64dd3209" (UID: "64067ae9-7904-4133-9c1f-f09a64dd3209"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 10:10:27 crc kubenswrapper[4730]: I0930 10:10:27.050299 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/64067ae9-7904-4133-9c1f-f09a64dd3209-config" (OuterVolumeSpecName: "config") pod "64067ae9-7904-4133-9c1f-f09a64dd3209" (UID: "64067ae9-7904-4133-9c1f-f09a64dd3209"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 10:10:27 crc kubenswrapper[4730]: I0930 10:10:27.055128 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/64067ae9-7904-4133-9c1f-f09a64dd3209-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "64067ae9-7904-4133-9c1f-f09a64dd3209" (UID: "64067ae9-7904-4133-9c1f-f09a64dd3209"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 10:10:27 crc kubenswrapper[4730]: I0930 10:10:27.057042 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/64067ae9-7904-4133-9c1f-f09a64dd3209-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "64067ae9-7904-4133-9c1f-f09a64dd3209" (UID: "64067ae9-7904-4133-9c1f-f09a64dd3209"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 10:10:27 crc kubenswrapper[4730]: I0930 10:10:27.064770 4730 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/64067ae9-7904-4133-9c1f-f09a64dd3209-config\") on node \"crc\" DevicePath \"\"" Sep 30 10:10:27 crc kubenswrapper[4730]: I0930 10:10:27.064981 4730 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/64067ae9-7904-4133-9c1f-f09a64dd3209-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 30 10:10:27 crc kubenswrapper[4730]: I0930 10:10:27.065120 4730 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/64067ae9-7904-4133-9c1f-f09a64dd3209-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 10:10:27 crc kubenswrapper[4730]: I0930 10:10:27.065221 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6qcwm\" (UniqueName: \"kubernetes.io/projected/64067ae9-7904-4133-9c1f-f09a64dd3209-kube-api-access-6qcwm\") on node \"crc\" DevicePath \"\"" Sep 30 10:10:27 crc kubenswrapper[4730]: I0930 10:10:27.065343 4730 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/64067ae9-7904-4133-9c1f-f09a64dd3209-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 30 10:10:27 crc kubenswrapper[4730]: I0930 10:10:27.069420 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-cell-mapping-b5t67"] Sep 30 10:10:27 crc kubenswrapper[4730]: E0930 10:10:27.070002 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64067ae9-7904-4133-9c1f-f09a64dd3209" containerName="init" Sep 30 10:10:27 crc kubenswrapper[4730]: I0930 10:10:27.070032 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="64067ae9-7904-4133-9c1f-f09a64dd3209" containerName="init" Sep 30 10:10:27 crc kubenswrapper[4730]: E0930 10:10:27.070042 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64067ae9-7904-4133-9c1f-f09a64dd3209" containerName="dnsmasq-dns" Sep 30 10:10:27 crc kubenswrapper[4730]: I0930 10:10:27.070053 4730 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="64067ae9-7904-4133-9c1f-f09a64dd3209" containerName="dnsmasq-dns" Sep 30 10:10:27 crc kubenswrapper[4730]: I0930 10:10:27.070369 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="64067ae9-7904-4133-9c1f-f09a64dd3209" containerName="dnsmasq-dns" Sep 30 10:10:27 crc kubenswrapper[4730]: I0930 10:10:27.071518 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-b5t67" Sep 30 10:10:27 crc kubenswrapper[4730]: I0930 10:10:27.073995 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-config-data" Sep 30 10:10:27 crc kubenswrapper[4730]: I0930 10:10:27.074691 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-scripts" Sep 30 10:10:27 crc kubenswrapper[4730]: I0930 10:10:27.077645 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-b5t67"] Sep 30 10:10:27 crc kubenswrapper[4730]: I0930 10:10:27.167074 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ngvzd\" (UniqueName: \"kubernetes.io/projected/e721808e-e96b-4395-8acc-cf3416d9a100-kube-api-access-ngvzd\") pod \"nova-cell1-cell-mapping-b5t67\" (UID: \"e721808e-e96b-4395-8acc-cf3416d9a100\") " pod="openstack/nova-cell1-cell-mapping-b5t67" Sep 30 10:10:27 crc kubenswrapper[4730]: I0930 10:10:27.167137 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e721808e-e96b-4395-8acc-cf3416d9a100-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-b5t67\" (UID: \"e721808e-e96b-4395-8acc-cf3416d9a100\") " pod="openstack/nova-cell1-cell-mapping-b5t67" Sep 30 10:10:27 crc kubenswrapper[4730]: I0930 10:10:27.167171 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e721808e-e96b-4395-8acc-cf3416d9a100-config-data\") pod \"nova-cell1-cell-mapping-b5t67\" (UID: \"e721808e-e96b-4395-8acc-cf3416d9a100\") " pod="openstack/nova-cell1-cell-mapping-b5t67" Sep 30 10:10:27 crc kubenswrapper[4730]: I0930 10:10:27.167482 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e721808e-e96b-4395-8acc-cf3416d9a100-scripts\") pod \"nova-cell1-cell-mapping-b5t67\" (UID: \"e721808e-e96b-4395-8acc-cf3416d9a100\") " pod="openstack/nova-cell1-cell-mapping-b5t67" Sep 30 10:10:27 crc kubenswrapper[4730]: I0930 10:10:27.269134 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e721808e-e96b-4395-8acc-cf3416d9a100-scripts\") pod \"nova-cell1-cell-mapping-b5t67\" (UID: \"e721808e-e96b-4395-8acc-cf3416d9a100\") " pod="openstack/nova-cell1-cell-mapping-b5t67" Sep 30 10:10:27 crc kubenswrapper[4730]: I0930 10:10:27.269460 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ngvzd\" (UniqueName: \"kubernetes.io/projected/e721808e-e96b-4395-8acc-cf3416d9a100-kube-api-access-ngvzd\") pod \"nova-cell1-cell-mapping-b5t67\" (UID: \"e721808e-e96b-4395-8acc-cf3416d9a100\") " pod="openstack/nova-cell1-cell-mapping-b5t67" Sep 30 10:10:27 crc kubenswrapper[4730]: I0930 10:10:27.269498 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" 
(UniqueName: \"kubernetes.io/secret/e721808e-e96b-4395-8acc-cf3416d9a100-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-b5t67\" (UID: \"e721808e-e96b-4395-8acc-cf3416d9a100\") " pod="openstack/nova-cell1-cell-mapping-b5t67" Sep 30 10:10:27 crc kubenswrapper[4730]: I0930 10:10:27.269530 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e721808e-e96b-4395-8acc-cf3416d9a100-config-data\") pod \"nova-cell1-cell-mapping-b5t67\" (UID: \"e721808e-e96b-4395-8acc-cf3416d9a100\") " pod="openstack/nova-cell1-cell-mapping-b5t67" Sep 30 10:10:27 crc kubenswrapper[4730]: I0930 10:10:27.273879 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e721808e-e96b-4395-8acc-cf3416d9a100-scripts\") pod \"nova-cell1-cell-mapping-b5t67\" (UID: \"e721808e-e96b-4395-8acc-cf3416d9a100\") " pod="openstack/nova-cell1-cell-mapping-b5t67" Sep 30 10:10:27 crc kubenswrapper[4730]: I0930 10:10:27.273919 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e721808e-e96b-4395-8acc-cf3416d9a100-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-b5t67\" (UID: \"e721808e-e96b-4395-8acc-cf3416d9a100\") " pod="openstack/nova-cell1-cell-mapping-b5t67" Sep 30 10:10:27 crc kubenswrapper[4730]: I0930 10:10:27.275257 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e721808e-e96b-4395-8acc-cf3416d9a100-config-data\") pod \"nova-cell1-cell-mapping-b5t67\" (UID: \"e721808e-e96b-4395-8acc-cf3416d9a100\") " pod="openstack/nova-cell1-cell-mapping-b5t67" Sep 30 10:10:27 crc kubenswrapper[4730]: I0930 10:10:27.287828 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ngvzd\" (UniqueName: \"kubernetes.io/projected/e721808e-e96b-4395-8acc-cf3416d9a100-kube-api-access-ngvzd\") pod \"nova-cell1-cell-mapping-b5t67\" (UID: \"e721808e-e96b-4395-8acc-cf3416d9a100\") " pod="openstack/nova-cell1-cell-mapping-b5t67" Sep 30 10:10:27 crc kubenswrapper[4730]: I0930 10:10:27.434670 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-b5t67" Sep 30 10:10:27 crc kubenswrapper[4730]: I0930 10:10:27.835461 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4eb0bfb8-0c6a-4d5f-9b85-e06888511203","Type":"ContainerStarted","Data":"81a7205ef18c65cacb4a108c3a59b9eaca4ea91863270b57313e1496163e43fb"} Sep 30 10:10:27 crc kubenswrapper[4730]: I0930 10:10:27.835842 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6dfdd9cfdc-s9sqb" Sep 30 10:10:27 crc kubenswrapper[4730]: I0930 10:10:27.896139 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6dfdd9cfdc-s9sqb"] Sep 30 10:10:27 crc kubenswrapper[4730]: I0930 10:10:27.908278 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6dfdd9cfdc-s9sqb"] Sep 30 10:10:27 crc kubenswrapper[4730]: I0930 10:10:27.962368 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-b5t67"] Sep 30 10:10:27 crc kubenswrapper[4730]: W0930 10:10:27.962928 4730 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode721808e_e96b_4395_8acc_cf3416d9a100.slice/crio-5f8d804fc61f3c98a4c26d55bb5b8692c6a610ad4fcf98db10f811bbb1627b55 WatchSource:0}: Error finding container 5f8d804fc61f3c98a4c26d55bb5b8692c6a610ad4fcf98db10f811bbb1627b55: Status 404 returned error can't find the container with id 5f8d804fc61f3c98a4c26d55bb5b8692c6a610ad4fcf98db10f811bbb1627b55 Sep 30 10:10:28 crc kubenswrapper[4730]: I0930 10:10:28.391887 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="64067ae9-7904-4133-9c1f-f09a64dd3209" path="/var/lib/kubelet/pods/64067ae9-7904-4133-9c1f-f09a64dd3209/volumes" Sep 30 10:10:28 crc kubenswrapper[4730]: I0930 10:10:28.845393 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-b5t67" event={"ID":"e721808e-e96b-4395-8acc-cf3416d9a100","Type":"ContainerStarted","Data":"8e1eef1328d14555fb5c9534a3758cee5a955dff1036a2afd29183885d5b8efb"} Sep 30 10:10:28 crc kubenswrapper[4730]: I0930 10:10:28.845683 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-b5t67" event={"ID":"e721808e-e96b-4395-8acc-cf3416d9a100","Type":"ContainerStarted","Data":"5f8d804fc61f3c98a4c26d55bb5b8692c6a610ad4fcf98db10f811bbb1627b55"} Sep 30 10:10:28 crc kubenswrapper[4730]: I0930 10:10:28.848042 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4eb0bfb8-0c6a-4d5f-9b85-e06888511203","Type":"ContainerStarted","Data":"c16bd2ad0179bdf8dbc0866cf78bb7aa035512635840e25304c8df0d2d56888c"} Sep 30 10:10:28 crc kubenswrapper[4730]: I0930 10:10:28.848238 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Sep 30 10:10:28 crc kubenswrapper[4730]: I0930 10:10:28.866103 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-cell-mapping-b5t67" podStartSLOduration=1.866084941 podStartE2EDuration="1.866084941s" podCreationTimestamp="2025-09-30 10:10:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 10:10:28.861950513 +0000 UTC m=+1273.195210506" watchObservedRunningTime="2025-09-30 10:10:28.866084941 +0000 UTC m=+1273.199344934" Sep 30 10:10:28 crc kubenswrapper[4730]: I0930 10:10:28.887701 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.33033089 podStartE2EDuration="4.887680176s" podCreationTimestamp="2025-09-30 10:10:24 +0000 UTC" firstStartedPulling="2025-09-30 10:10:25.633716266 +0000 UTC m=+1269.966976259" lastFinishedPulling="2025-09-30 10:10:28.191065552 +0000 UTC m=+1272.524325545" observedRunningTime="2025-09-30 10:10:28.884340889 +0000 UTC m=+1273.217600892" 
watchObservedRunningTime="2025-09-30 10:10:28.887680176 +0000 UTC m=+1273.220940169" Sep 30 10:10:31 crc kubenswrapper[4730]: I0930 10:10:31.121903 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Sep 30 10:10:31 crc kubenswrapper[4730]: I0930 10:10:31.122479 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Sep 30 10:10:32 crc kubenswrapper[4730]: I0930 10:10:32.136882 4730 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="a503514f-2964-4836-836d-8987de4f1828" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.201:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Sep 30 10:10:32 crc kubenswrapper[4730]: I0930 10:10:32.136918 4730 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="a503514f-2964-4836-836d-8987de4f1828" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.201:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Sep 30 10:10:32 crc kubenswrapper[4730]: I0930 10:10:32.337456 4730 patch_prober.go:28] interesting pod/machine-config-daemon-d4zf9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 10:10:32 crc kubenswrapper[4730]: I0930 10:10:32.337509 4730 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 10:10:33 crc kubenswrapper[4730]: I0930 10:10:33.896871 4730 generic.go:334] "Generic (PLEG): container finished" podID="e721808e-e96b-4395-8acc-cf3416d9a100" containerID="8e1eef1328d14555fb5c9534a3758cee5a955dff1036a2afd29183885d5b8efb" exitCode=0 Sep 30 10:10:33 crc kubenswrapper[4730]: I0930 10:10:33.896952 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-b5t67" event={"ID":"e721808e-e96b-4395-8acc-cf3416d9a100","Type":"ContainerDied","Data":"8e1eef1328d14555fb5c9534a3758cee5a955dff1036a2afd29183885d5b8efb"} Sep 30 10:10:35 crc kubenswrapper[4730]: I0930 10:10:35.300934 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-b5t67" Sep 30 10:10:35 crc kubenswrapper[4730]: I0930 10:10:35.435122 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvzd\" (UniqueName: \"kubernetes.io/projected/e721808e-e96b-4395-8acc-cf3416d9a100-kube-api-access-ngvzd\") pod \"e721808e-e96b-4395-8acc-cf3416d9a100\" (UID: \"e721808e-e96b-4395-8acc-cf3416d9a100\") " Sep 30 10:10:35 crc kubenswrapper[4730]: I0930 10:10:35.435196 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e721808e-e96b-4395-8acc-cf3416d9a100-scripts\") pod \"e721808e-e96b-4395-8acc-cf3416d9a100\" (UID: \"e721808e-e96b-4395-8acc-cf3416d9a100\") " Sep 30 10:10:35 crc kubenswrapper[4730]: I0930 10:10:35.435289 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e721808e-e96b-4395-8acc-cf3416d9a100-config-data\") pod \"e721808e-e96b-4395-8acc-cf3416d9a100\" (UID: \"e721808e-e96b-4395-8acc-cf3416d9a100\") " Sep 30 10:10:35 crc kubenswrapper[4730]: I0930 10:10:35.436181 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e721808e-e96b-4395-8acc-cf3416d9a100-combined-ca-bundle\") pod \"e721808e-e96b-4395-8acc-cf3416d9a100\" (UID: \"e721808e-e96b-4395-8acc-cf3416d9a100\") " Sep 30 10:10:35 crc kubenswrapper[4730]: I0930 10:10:35.440765 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e721808e-e96b-4395-8acc-cf3416d9a100-scripts" (OuterVolumeSpecName: "scripts") pod "e721808e-e96b-4395-8acc-cf3416d9a100" (UID: "e721808e-e96b-4395-8acc-cf3416d9a100"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:10:35 crc kubenswrapper[4730]: I0930 10:10:35.440887 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e721808e-e96b-4395-8acc-cf3416d9a100-kube-api-access-ngvzd" (OuterVolumeSpecName: "kube-api-access-ngvzd") pod "e721808e-e96b-4395-8acc-cf3416d9a100" (UID: "e721808e-e96b-4395-8acc-cf3416d9a100"). InnerVolumeSpecName "kube-api-access-ngvzd". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:10:35 crc kubenswrapper[4730]: I0930 10:10:35.463970 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e721808e-e96b-4395-8acc-cf3416d9a100-config-data" (OuterVolumeSpecName: "config-data") pod "e721808e-e96b-4395-8acc-cf3416d9a100" (UID: "e721808e-e96b-4395-8acc-cf3416d9a100"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:10:35 crc kubenswrapper[4730]: I0930 10:10:35.464920 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e721808e-e96b-4395-8acc-cf3416d9a100-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e721808e-e96b-4395-8acc-cf3416d9a100" (UID: "e721808e-e96b-4395-8acc-cf3416d9a100"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:10:35 crc kubenswrapper[4730]: I0930 10:10:35.539309 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvzd\" (UniqueName: \"kubernetes.io/projected/e721808e-e96b-4395-8acc-cf3416d9a100-kube-api-access-ngvzd\") on node \"crc\" DevicePath \"\"" Sep 30 10:10:35 crc kubenswrapper[4730]: I0930 10:10:35.539347 4730 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e721808e-e96b-4395-8acc-cf3416d9a100-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 10:10:35 crc kubenswrapper[4730]: I0930 10:10:35.539361 4730 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e721808e-e96b-4395-8acc-cf3416d9a100-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 10:10:35 crc kubenswrapper[4730]: I0930 10:10:35.539372 4730 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e721808e-e96b-4395-8acc-cf3416d9a100-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 10:10:35 crc kubenswrapper[4730]: I0930 10:10:35.941763 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-b5t67" event={"ID":"e721808e-e96b-4395-8acc-cf3416d9a100","Type":"ContainerDied","Data":"5f8d804fc61f3c98a4c26d55bb5b8692c6a610ad4fcf98db10f811bbb1627b55"} Sep 30 10:10:35 crc kubenswrapper[4730]: I0930 10:10:35.941811 4730 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5f8d804fc61f3c98a4c26d55bb5b8692c6a610ad4fcf98db10f811bbb1627b55" Sep 30 10:10:35 crc kubenswrapper[4730]: I0930 10:10:35.941942 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-b5t67" Sep 30 10:10:36 crc kubenswrapper[4730]: I0930 10:10:36.124207 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Sep 30 10:10:36 crc kubenswrapper[4730]: I0930 10:10:36.124487 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="a503514f-2964-4836-836d-8987de4f1828" containerName="nova-api-log" containerID="cri-o://2223d980b1ee911c35f4fa58fd44432808629e857906ec263a47815424a55f32" gracePeriod=30 Sep 30 10:10:36 crc kubenswrapper[4730]: I0930 10:10:36.124570 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="a503514f-2964-4836-836d-8987de4f1828" containerName="nova-api-api" containerID="cri-o://065aef1bbe53169e007bb6066d696bcf175aef85a227702280a17d958d7846e4" gracePeriod=30 Sep 30 10:10:36 crc kubenswrapper[4730]: I0930 10:10:36.173785 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 10:10:36 crc kubenswrapper[4730]: I0930 10:10:36.174027 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="8dd0a8ec-fce4-4b9c-8a10-e4ebf910c583" containerName="nova-scheduler-scheduler" containerID="cri-o://3c7ff92795c93c78aa6f06e32fd6fd03afc8c243d51f4c28f80a20fe4f199b6e" gracePeriod=30 Sep 30 10:10:36 crc kubenswrapper[4730]: I0930 10:10:36.194518 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 10:10:36 crc kubenswrapper[4730]: I0930 10:10:36.195591 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="21add49c-8259-4090-88ac-34b1b97149b7" 
containerName="nova-metadata-log" containerID="cri-o://70dbd6346cef1d39e7fce864e5c7922fdc918b6b1d2c80b577098fd7931895a8" gracePeriod=30 Sep 30 10:10:36 crc kubenswrapper[4730]: I0930 10:10:36.195858 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="21add49c-8259-4090-88ac-34b1b97149b7" containerName="nova-metadata-metadata" containerID="cri-o://2796086665ed4226afa3534b09a17a44456280bb0d18f70a0bb4e0b5f40932d6" gracePeriod=30 Sep 30 10:10:36 crc kubenswrapper[4730]: E0930 10:10:36.332207 4730 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod21add49c_8259_4090_88ac_34b1b97149b7.slice/crio-70dbd6346cef1d39e7fce864e5c7922fdc918b6b1d2c80b577098fd7931895a8.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda503514f_2964_4836_836d_8987de4f1828.slice/crio-2223d980b1ee911c35f4fa58fd44432808629e857906ec263a47815424a55f32.scope\": RecentStats: unable to find data in memory cache]" Sep 30 10:10:36 crc kubenswrapper[4730]: I0930 10:10:36.955980 4730 generic.go:334] "Generic (PLEG): container finished" podID="21add49c-8259-4090-88ac-34b1b97149b7" containerID="70dbd6346cef1d39e7fce864e5c7922fdc918b6b1d2c80b577098fd7931895a8" exitCode=143 Sep 30 10:10:36 crc kubenswrapper[4730]: I0930 10:10:36.956289 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"21add49c-8259-4090-88ac-34b1b97149b7","Type":"ContainerDied","Data":"70dbd6346cef1d39e7fce864e5c7922fdc918b6b1d2c80b577098fd7931895a8"} Sep 30 10:10:36 crc kubenswrapper[4730]: I0930 10:10:36.958556 4730 generic.go:334] "Generic (PLEG): container finished" podID="a503514f-2964-4836-836d-8987de4f1828" containerID="2223d980b1ee911c35f4fa58fd44432808629e857906ec263a47815424a55f32" exitCode=143 Sep 30 10:10:36 crc kubenswrapper[4730]: I0930 10:10:36.958589 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"a503514f-2964-4836-836d-8987de4f1828","Type":"ContainerDied","Data":"2223d980b1ee911c35f4fa58fd44432808629e857906ec263a47815424a55f32"} Sep 30 10:10:37 crc kubenswrapper[4730]: I0930 10:10:37.464008 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Sep 30 10:10:37 crc kubenswrapper[4730]: I0930 10:10:37.581069 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a503514f-2964-4836-836d-8987de4f1828-public-tls-certs\") pod \"a503514f-2964-4836-836d-8987de4f1828\" (UID: \"a503514f-2964-4836-836d-8987de4f1828\") " Sep 30 10:10:37 crc kubenswrapper[4730]: I0930 10:10:37.581124 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q78lp\" (UniqueName: \"kubernetes.io/projected/a503514f-2964-4836-836d-8987de4f1828-kube-api-access-q78lp\") pod \"a503514f-2964-4836-836d-8987de4f1828\" (UID: \"a503514f-2964-4836-836d-8987de4f1828\") " Sep 30 10:10:37 crc kubenswrapper[4730]: I0930 10:10:37.581163 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a503514f-2964-4836-836d-8987de4f1828-logs\") pod \"a503514f-2964-4836-836d-8987de4f1828\" (UID: \"a503514f-2964-4836-836d-8987de4f1828\") " Sep 30 10:10:37 crc kubenswrapper[4730]: I0930 10:10:37.581252 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a503514f-2964-4836-836d-8987de4f1828-combined-ca-bundle\") pod \"a503514f-2964-4836-836d-8987de4f1828\" (UID: \"a503514f-2964-4836-836d-8987de4f1828\") " Sep 30 10:10:37 crc kubenswrapper[4730]: I0930 10:10:37.581286 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a503514f-2964-4836-836d-8987de4f1828-config-data\") pod \"a503514f-2964-4836-836d-8987de4f1828\" (UID: \"a503514f-2964-4836-836d-8987de4f1828\") " Sep 30 10:10:37 crc kubenswrapper[4730]: I0930 10:10:37.581332 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a503514f-2964-4836-836d-8987de4f1828-internal-tls-certs\") pod \"a503514f-2964-4836-836d-8987de4f1828\" (UID: \"a503514f-2964-4836-836d-8987de4f1828\") " Sep 30 10:10:37 crc kubenswrapper[4730]: I0930 10:10:37.581866 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a503514f-2964-4836-836d-8987de4f1828-logs" (OuterVolumeSpecName: "logs") pod "a503514f-2964-4836-836d-8987de4f1828" (UID: "a503514f-2964-4836-836d-8987de4f1828"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 10:10:37 crc kubenswrapper[4730]: I0930 10:10:37.588483 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a503514f-2964-4836-836d-8987de4f1828-kube-api-access-q78lp" (OuterVolumeSpecName: "kube-api-access-q78lp") pod "a503514f-2964-4836-836d-8987de4f1828" (UID: "a503514f-2964-4836-836d-8987de4f1828"). InnerVolumeSpecName "kube-api-access-q78lp". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:10:37 crc kubenswrapper[4730]: I0930 10:10:37.615586 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a503514f-2964-4836-836d-8987de4f1828-config-data" (OuterVolumeSpecName: "config-data") pod "a503514f-2964-4836-836d-8987de4f1828" (UID: "a503514f-2964-4836-836d-8987de4f1828"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:10:37 crc kubenswrapper[4730]: I0930 10:10:37.617559 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a503514f-2964-4836-836d-8987de4f1828-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a503514f-2964-4836-836d-8987de4f1828" (UID: "a503514f-2964-4836-836d-8987de4f1828"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:10:37 crc kubenswrapper[4730]: I0930 10:10:37.643583 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a503514f-2964-4836-836d-8987de4f1828-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "a503514f-2964-4836-836d-8987de4f1828" (UID: "a503514f-2964-4836-836d-8987de4f1828"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:10:37 crc kubenswrapper[4730]: I0930 10:10:37.644368 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 30 10:10:37 crc kubenswrapper[4730]: I0930 10:10:37.644485 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a503514f-2964-4836-836d-8987de4f1828-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "a503514f-2964-4836-836d-8987de4f1828" (UID: "a503514f-2964-4836-836d-8987de4f1828"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:10:37 crc kubenswrapper[4730]: I0930 10:10:37.690083 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q78lp\" (UniqueName: \"kubernetes.io/projected/a503514f-2964-4836-836d-8987de4f1828-kube-api-access-q78lp\") on node \"crc\" DevicePath \"\"" Sep 30 10:10:37 crc kubenswrapper[4730]: I0930 10:10:37.690115 4730 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a503514f-2964-4836-836d-8987de4f1828-logs\") on node \"crc\" DevicePath \"\"" Sep 30 10:10:37 crc kubenswrapper[4730]: I0930 10:10:37.690128 4730 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a503514f-2964-4836-836d-8987de4f1828-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 10:10:37 crc kubenswrapper[4730]: I0930 10:10:37.690140 4730 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a503514f-2964-4836-836d-8987de4f1828-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 10:10:37 crc kubenswrapper[4730]: I0930 10:10:37.690150 4730 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a503514f-2964-4836-836d-8987de4f1828-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 10:10:37 crc kubenswrapper[4730]: I0930 10:10:37.690160 4730 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a503514f-2964-4836-836d-8987de4f1828-public-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 10:10:37 crc kubenswrapper[4730]: I0930 10:10:37.791851 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21add49c-8259-4090-88ac-34b1b97149b7-combined-ca-bundle\") pod \"21add49c-8259-4090-88ac-34b1b97149b7\" (UID: \"21add49c-8259-4090-88ac-34b1b97149b7\") " Sep 30 10:10:37 crc 
kubenswrapper[4730]: I0930 10:10:37.792371 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/21add49c-8259-4090-88ac-34b1b97149b7-logs\") pod \"21add49c-8259-4090-88ac-34b1b97149b7\" (UID: \"21add49c-8259-4090-88ac-34b1b97149b7\") " Sep 30 10:10:37 crc kubenswrapper[4730]: I0930 10:10:37.792411 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/21add49c-8259-4090-88ac-34b1b97149b7-nova-metadata-tls-certs\") pod \"21add49c-8259-4090-88ac-34b1b97149b7\" (UID: \"21add49c-8259-4090-88ac-34b1b97149b7\") " Sep 30 10:10:37 crc kubenswrapper[4730]: I0930 10:10:37.792490 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/21add49c-8259-4090-88ac-34b1b97149b7-config-data\") pod \"21add49c-8259-4090-88ac-34b1b97149b7\" (UID: \"21add49c-8259-4090-88ac-34b1b97149b7\") " Sep 30 10:10:37 crc kubenswrapper[4730]: I0930 10:10:37.792523 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2mxrn\" (UniqueName: \"kubernetes.io/projected/21add49c-8259-4090-88ac-34b1b97149b7-kube-api-access-2mxrn\") pod \"21add49c-8259-4090-88ac-34b1b97149b7\" (UID: \"21add49c-8259-4090-88ac-34b1b97149b7\") " Sep 30 10:10:37 crc kubenswrapper[4730]: I0930 10:10:37.793024 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/21add49c-8259-4090-88ac-34b1b97149b7-logs" (OuterVolumeSpecName: "logs") pod "21add49c-8259-4090-88ac-34b1b97149b7" (UID: "21add49c-8259-4090-88ac-34b1b97149b7"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 10:10:37 crc kubenswrapper[4730]: I0930 10:10:37.793409 4730 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/21add49c-8259-4090-88ac-34b1b97149b7-logs\") on node \"crc\" DevicePath \"\"" Sep 30 10:10:37 crc kubenswrapper[4730]: I0930 10:10:37.798713 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/21add49c-8259-4090-88ac-34b1b97149b7-kube-api-access-2mxrn" (OuterVolumeSpecName: "kube-api-access-2mxrn") pod "21add49c-8259-4090-88ac-34b1b97149b7" (UID: "21add49c-8259-4090-88ac-34b1b97149b7"). InnerVolumeSpecName "kube-api-access-2mxrn". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:10:37 crc kubenswrapper[4730]: I0930 10:10:37.825654 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/21add49c-8259-4090-88ac-34b1b97149b7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "21add49c-8259-4090-88ac-34b1b97149b7" (UID: "21add49c-8259-4090-88ac-34b1b97149b7"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:10:37 crc kubenswrapper[4730]: I0930 10:10:37.830493 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/21add49c-8259-4090-88ac-34b1b97149b7-config-data" (OuterVolumeSpecName: "config-data") pod "21add49c-8259-4090-88ac-34b1b97149b7" (UID: "21add49c-8259-4090-88ac-34b1b97149b7"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:10:37 crc kubenswrapper[4730]: I0930 10:10:37.845465 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/21add49c-8259-4090-88ac-34b1b97149b7-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "21add49c-8259-4090-88ac-34b1b97149b7" (UID: "21add49c-8259-4090-88ac-34b1b97149b7"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:10:37 crc kubenswrapper[4730]: I0930 10:10:37.895072 4730 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/21add49c-8259-4090-88ac-34b1b97149b7-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 10:10:37 crc kubenswrapper[4730]: I0930 10:10:37.895112 4730 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/21add49c-8259-4090-88ac-34b1b97149b7-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 10:10:37 crc kubenswrapper[4730]: I0930 10:10:37.895124 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2mxrn\" (UniqueName: \"kubernetes.io/projected/21add49c-8259-4090-88ac-34b1b97149b7-kube-api-access-2mxrn\") on node \"crc\" DevicePath \"\"" Sep 30 10:10:37 crc kubenswrapper[4730]: I0930 10:10:37.895133 4730 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21add49c-8259-4090-88ac-34b1b97149b7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 10:10:37 crc kubenswrapper[4730]: I0930 10:10:37.970971 4730 generic.go:334] "Generic (PLEG): container finished" podID="8dd0a8ec-fce4-4b9c-8a10-e4ebf910c583" containerID="3c7ff92795c93c78aa6f06e32fd6fd03afc8c243d51f4c28f80a20fe4f199b6e" exitCode=0 Sep 30 10:10:37 crc kubenswrapper[4730]: I0930 10:10:37.971062 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"8dd0a8ec-fce4-4b9c-8a10-e4ebf910c583","Type":"ContainerDied","Data":"3c7ff92795c93c78aa6f06e32fd6fd03afc8c243d51f4c28f80a20fe4f199b6e"} Sep 30 10:10:37 crc kubenswrapper[4730]: I0930 10:10:37.974670 4730 generic.go:334] "Generic (PLEG): container finished" podID="21add49c-8259-4090-88ac-34b1b97149b7" containerID="2796086665ed4226afa3534b09a17a44456280bb0d18f70a0bb4e0b5f40932d6" exitCode=0 Sep 30 10:10:37 crc kubenswrapper[4730]: I0930 10:10:37.974747 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"21add49c-8259-4090-88ac-34b1b97149b7","Type":"ContainerDied","Data":"2796086665ed4226afa3534b09a17a44456280bb0d18f70a0bb4e0b5f40932d6"} Sep 30 10:10:37 crc kubenswrapper[4730]: I0930 10:10:37.974779 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"21add49c-8259-4090-88ac-34b1b97149b7","Type":"ContainerDied","Data":"f306a8418dfd312287c795dcf4ad35e73e8543666c11df799b3a2ba2c1da9895"} Sep 30 10:10:37 crc kubenswrapper[4730]: I0930 10:10:37.974794 4730 scope.go:117] "RemoveContainer" containerID="2796086665ed4226afa3534b09a17a44456280bb0d18f70a0bb4e0b5f40932d6" Sep 30 10:10:37 crc kubenswrapper[4730]: I0930 10:10:37.974918 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Sep 30 10:10:37 crc kubenswrapper[4730]: I0930 10:10:37.977552 4730 generic.go:334] "Generic (PLEG): container finished" podID="a503514f-2964-4836-836d-8987de4f1828" containerID="065aef1bbe53169e007bb6066d696bcf175aef85a227702280a17d958d7846e4" exitCode=0 Sep 30 10:10:37 crc kubenswrapper[4730]: I0930 10:10:37.977577 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"a503514f-2964-4836-836d-8987de4f1828","Type":"ContainerDied","Data":"065aef1bbe53169e007bb6066d696bcf175aef85a227702280a17d958d7846e4"} Sep 30 10:10:37 crc kubenswrapper[4730]: I0930 10:10:37.977592 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"a503514f-2964-4836-836d-8987de4f1828","Type":"ContainerDied","Data":"491d54791661faef63bec2c693ed59180fc1c46811541cc0457a82e4d926a299"} Sep 30 10:10:37 crc kubenswrapper[4730]: I0930 10:10:37.977646 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Sep 30 10:10:38 crc kubenswrapper[4730]: I0930 10:10:38.016306 4730 scope.go:117] "RemoveContainer" containerID="70dbd6346cef1d39e7fce864e5c7922fdc918b6b1d2c80b577098fd7931895a8" Sep 30 10:10:38 crc kubenswrapper[4730]: I0930 10:10:38.029237 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 10:10:38 crc kubenswrapper[4730]: I0930 10:10:38.083456 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 10:10:38 crc kubenswrapper[4730]: I0930 10:10:38.084732 4730 scope.go:117] "RemoveContainer" containerID="2796086665ed4226afa3534b09a17a44456280bb0d18f70a0bb4e0b5f40932d6" Sep 30 10:10:38 crc kubenswrapper[4730]: E0930 10:10:38.085064 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2796086665ed4226afa3534b09a17a44456280bb0d18f70a0bb4e0b5f40932d6\": container with ID starting with 2796086665ed4226afa3534b09a17a44456280bb0d18f70a0bb4e0b5f40932d6 not found: ID does not exist" containerID="2796086665ed4226afa3534b09a17a44456280bb0d18f70a0bb4e0b5f40932d6" Sep 30 10:10:38 crc kubenswrapper[4730]: I0930 10:10:38.085093 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2796086665ed4226afa3534b09a17a44456280bb0d18f70a0bb4e0b5f40932d6"} err="failed to get container status \"2796086665ed4226afa3534b09a17a44456280bb0d18f70a0bb4e0b5f40932d6\": rpc error: code = NotFound desc = could not find container \"2796086665ed4226afa3534b09a17a44456280bb0d18f70a0bb4e0b5f40932d6\": container with ID starting with 2796086665ed4226afa3534b09a17a44456280bb0d18f70a0bb4e0b5f40932d6 not found: ID does not exist" Sep 30 10:10:38 crc kubenswrapper[4730]: I0930 10:10:38.085117 4730 scope.go:117] "RemoveContainer" containerID="70dbd6346cef1d39e7fce864e5c7922fdc918b6b1d2c80b577098fd7931895a8" Sep 30 10:10:38 crc kubenswrapper[4730]: E0930 10:10:38.085347 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"70dbd6346cef1d39e7fce864e5c7922fdc918b6b1d2c80b577098fd7931895a8\": container with ID starting with 70dbd6346cef1d39e7fce864e5c7922fdc918b6b1d2c80b577098fd7931895a8 not found: ID does not exist" containerID="70dbd6346cef1d39e7fce864e5c7922fdc918b6b1d2c80b577098fd7931895a8" Sep 30 10:10:38 crc kubenswrapper[4730]: I0930 10:10:38.085364 4730 pod_container_deletor.go:53] "DeleteContainer 
returned error" containerID={"Type":"cri-o","ID":"70dbd6346cef1d39e7fce864e5c7922fdc918b6b1d2c80b577098fd7931895a8"} err="failed to get container status \"70dbd6346cef1d39e7fce864e5c7922fdc918b6b1d2c80b577098fd7931895a8\": rpc error: code = NotFound desc = could not find container \"70dbd6346cef1d39e7fce864e5c7922fdc918b6b1d2c80b577098fd7931895a8\": container with ID starting with 70dbd6346cef1d39e7fce864e5c7922fdc918b6b1d2c80b577098fd7931895a8 not found: ID does not exist" Sep 30 10:10:38 crc kubenswrapper[4730]: I0930 10:10:38.085376 4730 scope.go:117] "RemoveContainer" containerID="065aef1bbe53169e007bb6066d696bcf175aef85a227702280a17d958d7846e4" Sep 30 10:10:38 crc kubenswrapper[4730]: I0930 10:10:38.111476 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Sep 30 10:10:38 crc kubenswrapper[4730]: I0930 10:10:38.114002 4730 scope.go:117] "RemoveContainer" containerID="2223d980b1ee911c35f4fa58fd44432808629e857906ec263a47815424a55f32" Sep 30 10:10:38 crc kubenswrapper[4730]: I0930 10:10:38.135584 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Sep 30 10:10:38 crc kubenswrapper[4730]: I0930 10:10:38.143144 4730 scope.go:117] "RemoveContainer" containerID="065aef1bbe53169e007bb6066d696bcf175aef85a227702280a17d958d7846e4" Sep 30 10:10:38 crc kubenswrapper[4730]: E0930 10:10:38.144139 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"065aef1bbe53169e007bb6066d696bcf175aef85a227702280a17d958d7846e4\": container with ID starting with 065aef1bbe53169e007bb6066d696bcf175aef85a227702280a17d958d7846e4 not found: ID does not exist" containerID="065aef1bbe53169e007bb6066d696bcf175aef85a227702280a17d958d7846e4" Sep 30 10:10:38 crc kubenswrapper[4730]: I0930 10:10:38.144182 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"065aef1bbe53169e007bb6066d696bcf175aef85a227702280a17d958d7846e4"} err="failed to get container status \"065aef1bbe53169e007bb6066d696bcf175aef85a227702280a17d958d7846e4\": rpc error: code = NotFound desc = could not find container \"065aef1bbe53169e007bb6066d696bcf175aef85a227702280a17d958d7846e4\": container with ID starting with 065aef1bbe53169e007bb6066d696bcf175aef85a227702280a17d958d7846e4 not found: ID does not exist" Sep 30 10:10:38 crc kubenswrapper[4730]: I0930 10:10:38.144207 4730 scope.go:117] "RemoveContainer" containerID="2223d980b1ee911c35f4fa58fd44432808629e857906ec263a47815424a55f32" Sep 30 10:10:38 crc kubenswrapper[4730]: E0930 10:10:38.144494 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2223d980b1ee911c35f4fa58fd44432808629e857906ec263a47815424a55f32\": container with ID starting with 2223d980b1ee911c35f4fa58fd44432808629e857906ec263a47815424a55f32 not found: ID does not exist" containerID="2223d980b1ee911c35f4fa58fd44432808629e857906ec263a47815424a55f32" Sep 30 10:10:38 crc kubenswrapper[4730]: I0930 10:10:38.144526 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2223d980b1ee911c35f4fa58fd44432808629e857906ec263a47815424a55f32"} err="failed to get container status \"2223d980b1ee911c35f4fa58fd44432808629e857906ec263a47815424a55f32\": rpc error: code = NotFound desc = could not find container \"2223d980b1ee911c35f4fa58fd44432808629e857906ec263a47815424a55f32\": container with ID starting with 
Sep 30 10:10:38 crc kubenswrapper[4730]: I0930 10:10:38.144526 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2223d980b1ee911c35f4fa58fd44432808629e857906ec263a47815424a55f32"} err="failed to get container status \"2223d980b1ee911c35f4fa58fd44432808629e857906ec263a47815424a55f32\": rpc error: code = NotFound desc = could not find container \"2223d980b1ee911c35f4fa58fd44432808629e857906ec263a47815424a55f32\": container with ID starting with 2223d980b1ee911c35f4fa58fd44432808629e857906ec263a47815424a55f32 not found: ID does not exist"
Sep 30 10:10:38 crc kubenswrapper[4730]: I0930 10:10:38.157591 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"]
Sep 30 10:10:38 crc kubenswrapper[4730]: E0930 10:10:38.158072 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="21add49c-8259-4090-88ac-34b1b97149b7" containerName="nova-metadata-metadata"
Sep 30 10:10:38 crc kubenswrapper[4730]: I0930 10:10:38.158093 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="21add49c-8259-4090-88ac-34b1b97149b7" containerName="nova-metadata-metadata"
Sep 30 10:10:38 crc kubenswrapper[4730]: E0930 10:10:38.158104 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e721808e-e96b-4395-8acc-cf3416d9a100" containerName="nova-manage"
Sep 30 10:10:38 crc kubenswrapper[4730]: I0930 10:10:38.158110 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="e721808e-e96b-4395-8acc-cf3416d9a100" containerName="nova-manage"
Sep 30 10:10:38 crc kubenswrapper[4730]: E0930 10:10:38.158131 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a503514f-2964-4836-836d-8987de4f1828" containerName="nova-api-api"
Sep 30 10:10:38 crc kubenswrapper[4730]: I0930 10:10:38.158137 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="a503514f-2964-4836-836d-8987de4f1828" containerName="nova-api-api"
Sep 30 10:10:38 crc kubenswrapper[4730]: E0930 10:10:38.158149 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="21add49c-8259-4090-88ac-34b1b97149b7" containerName="nova-metadata-log"
Sep 30 10:10:38 crc kubenswrapper[4730]: I0930 10:10:38.158155 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="21add49c-8259-4090-88ac-34b1b97149b7" containerName="nova-metadata-log"
Sep 30 10:10:38 crc kubenswrapper[4730]: E0930 10:10:38.158168 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a503514f-2964-4836-836d-8987de4f1828" containerName="nova-api-log"
Sep 30 10:10:38 crc kubenswrapper[4730]: I0930 10:10:38.158174 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="a503514f-2964-4836-836d-8987de4f1828" containerName="nova-api-log"
Sep 30 10:10:38 crc kubenswrapper[4730]: I0930 10:10:38.158371 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="21add49c-8259-4090-88ac-34b1b97149b7" containerName="nova-metadata-metadata"
Sep 30 10:10:38 crc kubenswrapper[4730]: I0930 10:10:38.158385 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="a503514f-2964-4836-836d-8987de4f1828" containerName="nova-api-api"
Sep 30 10:10:38 crc kubenswrapper[4730]: I0930 10:10:38.158398 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="21add49c-8259-4090-88ac-34b1b97149b7" containerName="nova-metadata-log"
Sep 30 10:10:38 crc kubenswrapper[4730]: I0930 10:10:38.158411 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="e721808e-e96b-4395-8acc-cf3416d9a100" containerName="nova-manage"
Sep 30 10:10:38 crc kubenswrapper[4730]: I0930 10:10:38.158425 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="a503514f-2964-4836-836d-8987de4f1828" containerName="nova-api-log"
Sep 30 10:10:38 crc kubenswrapper[4730]: I0930 10:10:38.163497 4730 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openstack/nova-metadata-0" Sep 30 10:10:38 crc kubenswrapper[4730]: I0930 10:10:38.166286 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Sep 30 10:10:38 crc kubenswrapper[4730]: I0930 10:10:38.166496 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Sep 30 10:10:38 crc kubenswrapper[4730]: I0930 10:10:38.172116 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Sep 30 10:10:38 crc kubenswrapper[4730]: I0930 10:10:38.176782 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Sep 30 10:10:38 crc kubenswrapper[4730]: I0930 10:10:38.180223 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Sep 30 10:10:38 crc kubenswrapper[4730]: I0930 10:10:38.180438 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Sep 30 10:10:38 crc kubenswrapper[4730]: I0930 10:10:38.183155 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Sep 30 10:10:38 crc kubenswrapper[4730]: I0930 10:10:38.188837 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Sep 30 10:10:38 crc kubenswrapper[4730]: I0930 10:10:38.189721 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 10:10:38 crc kubenswrapper[4730]: I0930 10:10:38.201295 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Sep 30 10:10:38 crc kubenswrapper[4730]: I0930 10:10:38.301427 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m2t2w\" (UniqueName: \"kubernetes.io/projected/8dd0a8ec-fce4-4b9c-8a10-e4ebf910c583-kube-api-access-m2t2w\") pod \"8dd0a8ec-fce4-4b9c-8a10-e4ebf910c583\" (UID: \"8dd0a8ec-fce4-4b9c-8a10-e4ebf910c583\") " Sep 30 10:10:38 crc kubenswrapper[4730]: I0930 10:10:38.301473 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8dd0a8ec-fce4-4b9c-8a10-e4ebf910c583-config-data\") pod \"8dd0a8ec-fce4-4b9c-8a10-e4ebf910c583\" (UID: \"8dd0a8ec-fce4-4b9c-8a10-e4ebf910c583\") " Sep 30 10:10:38 crc kubenswrapper[4730]: I0930 10:10:38.301658 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8dd0a8ec-fce4-4b9c-8a10-e4ebf910c583-combined-ca-bundle\") pod \"8dd0a8ec-fce4-4b9c-8a10-e4ebf910c583\" (UID: \"8dd0a8ec-fce4-4b9c-8a10-e4ebf910c583\") " Sep 30 10:10:38 crc kubenswrapper[4730]: I0930 10:10:38.302022 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0332adf1-8148-4923-9273-1ef8869dfad1-public-tls-certs\") pod \"nova-api-0\" (UID: \"0332adf1-8148-4923-9273-1ef8869dfad1\") " pod="openstack/nova-api-0" Sep 30 10:10:38 crc kubenswrapper[4730]: I0930 10:10:38.302072 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0332adf1-8148-4923-9273-1ef8869dfad1-logs\") pod \"nova-api-0\" (UID: \"0332adf1-8148-4923-9273-1ef8869dfad1\") " pod="openstack/nova-api-0" Sep 30 10:10:38 crc kubenswrapper[4730]: I0930 10:10:38.302130 4730 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a3c8895e-0bd8-4e06-a121-3afe3bcdf54f-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"a3c8895e-0bd8-4e06-a121-3afe3bcdf54f\") " pod="openstack/nova-metadata-0" Sep 30 10:10:38 crc kubenswrapper[4730]: I0930 10:10:38.302231 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4zqdf\" (UniqueName: \"kubernetes.io/projected/0332adf1-8148-4923-9273-1ef8869dfad1-kube-api-access-4zqdf\") pod \"nova-api-0\" (UID: \"0332adf1-8148-4923-9273-1ef8869dfad1\") " pod="openstack/nova-api-0" Sep 30 10:10:38 crc kubenswrapper[4730]: I0930 10:10:38.302290 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nndds\" (UniqueName: \"kubernetes.io/projected/a3c8895e-0bd8-4e06-a121-3afe3bcdf54f-kube-api-access-nndds\") pod \"nova-metadata-0\" (UID: \"a3c8895e-0bd8-4e06-a121-3afe3bcdf54f\") " pod="openstack/nova-metadata-0" Sep 30 10:10:38 crc kubenswrapper[4730]: I0930 10:10:38.302342 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a3c8895e-0bd8-4e06-a121-3afe3bcdf54f-config-data\") pod \"nova-metadata-0\" (UID: \"a3c8895e-0bd8-4e06-a121-3afe3bcdf54f\") " pod="openstack/nova-metadata-0" Sep 30 10:10:38 crc kubenswrapper[4730]: I0930 10:10:38.302361 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0332adf1-8148-4923-9273-1ef8869dfad1-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"0332adf1-8148-4923-9273-1ef8869dfad1\") " pod="openstack/nova-api-0" Sep 30 10:10:38 crc kubenswrapper[4730]: I0930 10:10:38.302386 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a3c8895e-0bd8-4e06-a121-3afe3bcdf54f-logs\") pod \"nova-metadata-0\" (UID: \"a3c8895e-0bd8-4e06-a121-3afe3bcdf54f\") " pod="openstack/nova-metadata-0" Sep 30 10:10:38 crc kubenswrapper[4730]: I0930 10:10:38.302411 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/a3c8895e-0bd8-4e06-a121-3afe3bcdf54f-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"a3c8895e-0bd8-4e06-a121-3afe3bcdf54f\") " pod="openstack/nova-metadata-0" Sep 30 10:10:38 crc kubenswrapper[4730]: I0930 10:10:38.302440 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0332adf1-8148-4923-9273-1ef8869dfad1-config-data\") pod \"nova-api-0\" (UID: \"0332adf1-8148-4923-9273-1ef8869dfad1\") " pod="openstack/nova-api-0" Sep 30 10:10:38 crc kubenswrapper[4730]: I0930 10:10:38.302493 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0332adf1-8148-4923-9273-1ef8869dfad1-internal-tls-certs\") pod \"nova-api-0\" (UID: \"0332adf1-8148-4923-9273-1ef8869dfad1\") " pod="openstack/nova-api-0" Sep 30 10:10:38 crc kubenswrapper[4730]: I0930 10:10:38.306537 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/projected/8dd0a8ec-fce4-4b9c-8a10-e4ebf910c583-kube-api-access-m2t2w" (OuterVolumeSpecName: "kube-api-access-m2t2w") pod "8dd0a8ec-fce4-4b9c-8a10-e4ebf910c583" (UID: "8dd0a8ec-fce4-4b9c-8a10-e4ebf910c583"). InnerVolumeSpecName "kube-api-access-m2t2w". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:10:38 crc kubenswrapper[4730]: I0930 10:10:38.329975 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8dd0a8ec-fce4-4b9c-8a10-e4ebf910c583-config-data" (OuterVolumeSpecName: "config-data") pod "8dd0a8ec-fce4-4b9c-8a10-e4ebf910c583" (UID: "8dd0a8ec-fce4-4b9c-8a10-e4ebf910c583"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:10:38 crc kubenswrapper[4730]: I0930 10:10:38.332417 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8dd0a8ec-fce4-4b9c-8a10-e4ebf910c583-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8dd0a8ec-fce4-4b9c-8a10-e4ebf910c583" (UID: "8dd0a8ec-fce4-4b9c-8a10-e4ebf910c583"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:10:38 crc kubenswrapper[4730]: I0930 10:10:38.397849 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="21add49c-8259-4090-88ac-34b1b97149b7" path="/var/lib/kubelet/pods/21add49c-8259-4090-88ac-34b1b97149b7/volumes" Sep 30 10:10:38 crc kubenswrapper[4730]: I0930 10:10:38.402900 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a503514f-2964-4836-836d-8987de4f1828" path="/var/lib/kubelet/pods/a503514f-2964-4836-836d-8987de4f1828/volumes" Sep 30 10:10:38 crc kubenswrapper[4730]: I0930 10:10:38.405869 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/a3c8895e-0bd8-4e06-a121-3afe3bcdf54f-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"a3c8895e-0bd8-4e06-a121-3afe3bcdf54f\") " pod="openstack/nova-metadata-0" Sep 30 10:10:38 crc kubenswrapper[4730]: I0930 10:10:38.405940 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0332adf1-8148-4923-9273-1ef8869dfad1-config-data\") pod \"nova-api-0\" (UID: \"0332adf1-8148-4923-9273-1ef8869dfad1\") " pod="openstack/nova-api-0" Sep 30 10:10:38 crc kubenswrapper[4730]: I0930 10:10:38.405986 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0332adf1-8148-4923-9273-1ef8869dfad1-internal-tls-certs\") pod \"nova-api-0\" (UID: \"0332adf1-8148-4923-9273-1ef8869dfad1\") " pod="openstack/nova-api-0" Sep 30 10:10:38 crc kubenswrapper[4730]: I0930 10:10:38.406121 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0332adf1-8148-4923-9273-1ef8869dfad1-public-tls-certs\") pod \"nova-api-0\" (UID: \"0332adf1-8148-4923-9273-1ef8869dfad1\") " pod="openstack/nova-api-0" Sep 30 10:10:38 crc kubenswrapper[4730]: I0930 10:10:38.406181 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0332adf1-8148-4923-9273-1ef8869dfad1-logs\") pod \"nova-api-0\" (UID: \"0332adf1-8148-4923-9273-1ef8869dfad1\") " pod="openstack/nova-api-0" Sep 30 10:10:38 crc kubenswrapper[4730]: I0930 10:10:38.406239 4730 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a3c8895e-0bd8-4e06-a121-3afe3bcdf54f-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"a3c8895e-0bd8-4e06-a121-3afe3bcdf54f\") " pod="openstack/nova-metadata-0" Sep 30 10:10:38 crc kubenswrapper[4730]: I0930 10:10:38.406288 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4zqdf\" (UniqueName: \"kubernetes.io/projected/0332adf1-8148-4923-9273-1ef8869dfad1-kube-api-access-4zqdf\") pod \"nova-api-0\" (UID: \"0332adf1-8148-4923-9273-1ef8869dfad1\") " pod="openstack/nova-api-0" Sep 30 10:10:38 crc kubenswrapper[4730]: I0930 10:10:38.406334 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nndds\" (UniqueName: \"kubernetes.io/projected/a3c8895e-0bd8-4e06-a121-3afe3bcdf54f-kube-api-access-nndds\") pod \"nova-metadata-0\" (UID: \"a3c8895e-0bd8-4e06-a121-3afe3bcdf54f\") " pod="openstack/nova-metadata-0" Sep 30 10:10:38 crc kubenswrapper[4730]: I0930 10:10:38.406376 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a3c8895e-0bd8-4e06-a121-3afe3bcdf54f-config-data\") pod \"nova-metadata-0\" (UID: \"a3c8895e-0bd8-4e06-a121-3afe3bcdf54f\") " pod="openstack/nova-metadata-0" Sep 30 10:10:38 crc kubenswrapper[4730]: I0930 10:10:38.406408 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0332adf1-8148-4923-9273-1ef8869dfad1-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"0332adf1-8148-4923-9273-1ef8869dfad1\") " pod="openstack/nova-api-0" Sep 30 10:10:38 crc kubenswrapper[4730]: I0930 10:10:38.406454 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a3c8895e-0bd8-4e06-a121-3afe3bcdf54f-logs\") pod \"nova-metadata-0\" (UID: \"a3c8895e-0bd8-4e06-a121-3afe3bcdf54f\") " pod="openstack/nova-metadata-0" Sep 30 10:10:38 crc kubenswrapper[4730]: I0930 10:10:38.406550 4730 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8dd0a8ec-fce4-4b9c-8a10-e4ebf910c583-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 10:10:38 crc kubenswrapper[4730]: I0930 10:10:38.406579 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m2t2w\" (UniqueName: \"kubernetes.io/projected/8dd0a8ec-fce4-4b9c-8a10-e4ebf910c583-kube-api-access-m2t2w\") on node \"crc\" DevicePath \"\"" Sep 30 10:10:38 crc kubenswrapper[4730]: I0930 10:10:38.406599 4730 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8dd0a8ec-fce4-4b9c-8a10-e4ebf910c583-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 10:10:38 crc kubenswrapper[4730]: I0930 10:10:38.407089 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a3c8895e-0bd8-4e06-a121-3afe3bcdf54f-logs\") pod \"nova-metadata-0\" (UID: \"a3c8895e-0bd8-4e06-a121-3afe3bcdf54f\") " pod="openstack/nova-metadata-0" Sep 30 10:10:38 crc kubenswrapper[4730]: I0930 10:10:38.422385 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0332adf1-8148-4923-9273-1ef8869dfad1-logs\") pod \"nova-api-0\" (UID: \"0332adf1-8148-4923-9273-1ef8869dfad1\") " 
pod="openstack/nova-api-0" Sep 30 10:10:38 crc kubenswrapper[4730]: I0930 10:10:38.424374 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0332adf1-8148-4923-9273-1ef8869dfad1-internal-tls-certs\") pod \"nova-api-0\" (UID: \"0332adf1-8148-4923-9273-1ef8869dfad1\") " pod="openstack/nova-api-0" Sep 30 10:10:38 crc kubenswrapper[4730]: I0930 10:10:38.432355 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0332adf1-8148-4923-9273-1ef8869dfad1-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"0332adf1-8148-4923-9273-1ef8869dfad1\") " pod="openstack/nova-api-0" Sep 30 10:10:38 crc kubenswrapper[4730]: I0930 10:10:38.437666 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0332adf1-8148-4923-9273-1ef8869dfad1-public-tls-certs\") pod \"nova-api-0\" (UID: \"0332adf1-8148-4923-9273-1ef8869dfad1\") " pod="openstack/nova-api-0" Sep 30 10:10:38 crc kubenswrapper[4730]: I0930 10:10:38.442163 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a3c8895e-0bd8-4e06-a121-3afe3bcdf54f-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"a3c8895e-0bd8-4e06-a121-3afe3bcdf54f\") " pod="openstack/nova-metadata-0" Sep 30 10:10:38 crc kubenswrapper[4730]: I0930 10:10:38.445183 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/a3c8895e-0bd8-4e06-a121-3afe3bcdf54f-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"a3c8895e-0bd8-4e06-a121-3afe3bcdf54f\") " pod="openstack/nova-metadata-0" Sep 30 10:10:38 crc kubenswrapper[4730]: I0930 10:10:38.447131 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4zqdf\" (UniqueName: \"kubernetes.io/projected/0332adf1-8148-4923-9273-1ef8869dfad1-kube-api-access-4zqdf\") pod \"nova-api-0\" (UID: \"0332adf1-8148-4923-9273-1ef8869dfad1\") " pod="openstack/nova-api-0" Sep 30 10:10:38 crc kubenswrapper[4730]: I0930 10:10:38.449202 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0332adf1-8148-4923-9273-1ef8869dfad1-config-data\") pod \"nova-api-0\" (UID: \"0332adf1-8148-4923-9273-1ef8869dfad1\") " pod="openstack/nova-api-0" Sep 30 10:10:38 crc kubenswrapper[4730]: I0930 10:10:38.449847 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a3c8895e-0bd8-4e06-a121-3afe3bcdf54f-config-data\") pod \"nova-metadata-0\" (UID: \"a3c8895e-0bd8-4e06-a121-3afe3bcdf54f\") " pod="openstack/nova-metadata-0" Sep 30 10:10:38 crc kubenswrapper[4730]: I0930 10:10:38.472479 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nndds\" (UniqueName: \"kubernetes.io/projected/a3c8895e-0bd8-4e06-a121-3afe3bcdf54f-kube-api-access-nndds\") pod \"nova-metadata-0\" (UID: \"a3c8895e-0bd8-4e06-a121-3afe3bcdf54f\") " pod="openstack/nova-metadata-0" Sep 30 10:10:38 crc kubenswrapper[4730]: I0930 10:10:38.490114 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 30 10:10:38 crc kubenswrapper[4730]: I0930 10:10:38.509838 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Sep 30 10:10:38 crc kubenswrapper[4730]: I0930 10:10:38.993230 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"8dd0a8ec-fce4-4b9c-8a10-e4ebf910c583","Type":"ContainerDied","Data":"98e292868f7f80083d8a8b7db102974bec1f035e831bb290d64ffb57cf21817c"} Sep 30 10:10:38 crc kubenswrapper[4730]: I0930 10:10:38.993946 4730 scope.go:117] "RemoveContainer" containerID="3c7ff92795c93c78aa6f06e32fd6fd03afc8c243d51f4c28f80a20fe4f199b6e" Sep 30 10:10:38 crc kubenswrapper[4730]: I0930 10:10:38.994117 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Sep 30 10:10:39 crc kubenswrapper[4730]: I0930 10:10:39.015090 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 10:10:39 crc kubenswrapper[4730]: I0930 10:10:39.026782 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 10:10:39 crc kubenswrapper[4730]: I0930 10:10:39.035687 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 10:10:39 crc kubenswrapper[4730]: W0930 10:10:39.042029 4730 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda3c8895e_0bd8_4e06_a121_3afe3bcdf54f.slice/crio-5f67a8331f003735f9c1ace748a9aff4be07b100960961792d5c31075b3e5ec1 WatchSource:0}: Error finding container 5f67a8331f003735f9c1ace748a9aff4be07b100960961792d5c31075b3e5ec1: Status 404 returned error can't find the container with id 5f67a8331f003735f9c1ace748a9aff4be07b100960961792d5c31075b3e5ec1 Sep 30 10:10:39 crc kubenswrapper[4730]: I0930 10:10:39.044481 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 10:10:39 crc kubenswrapper[4730]: E0930 10:10:39.045001 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8dd0a8ec-fce4-4b9c-8a10-e4ebf910c583" containerName="nova-scheduler-scheduler" Sep 30 10:10:39 crc kubenswrapper[4730]: I0930 10:10:39.045024 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="8dd0a8ec-fce4-4b9c-8a10-e4ebf910c583" containerName="nova-scheduler-scheduler" Sep 30 10:10:39 crc kubenswrapper[4730]: I0930 10:10:39.045274 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="8dd0a8ec-fce4-4b9c-8a10-e4ebf910c583" containerName="nova-scheduler-scheduler" Sep 30 10:10:39 crc kubenswrapper[4730]: I0930 10:10:39.046161 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Sep 30 10:10:39 crc kubenswrapper[4730]: I0930 10:10:39.052378 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Sep 30 10:10:39 crc kubenswrapper[4730]: I0930 10:10:39.074220 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 10:10:39 crc kubenswrapper[4730]: W0930 10:10:39.109965 4730 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0332adf1_8148_4923_9273_1ef8869dfad1.slice/crio-54b8ffec5aeeb4027e9a0db3574c662ee225151f5efc5491b19a2a288772dd2d WatchSource:0}: Error finding container 54b8ffec5aeeb4027e9a0db3574c662ee225151f5efc5491b19a2a288772dd2d: Status 404 returned error can't find the container with id 54b8ffec5aeeb4027e9a0db3574c662ee225151f5efc5491b19a2a288772dd2d Sep 30 10:10:39 crc kubenswrapper[4730]: I0930 10:10:39.112317 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Sep 30 10:10:39 crc kubenswrapper[4730]: I0930 10:10:39.119032 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-blkc2\" (UniqueName: \"kubernetes.io/projected/fca33d2f-4ffc-40bf-a02b-f5f757ca2d1f-kube-api-access-blkc2\") pod \"nova-scheduler-0\" (UID: \"fca33d2f-4ffc-40bf-a02b-f5f757ca2d1f\") " pod="openstack/nova-scheduler-0" Sep 30 10:10:39 crc kubenswrapper[4730]: I0930 10:10:39.119130 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fca33d2f-4ffc-40bf-a02b-f5f757ca2d1f-config-data\") pod \"nova-scheduler-0\" (UID: \"fca33d2f-4ffc-40bf-a02b-f5f757ca2d1f\") " pod="openstack/nova-scheduler-0" Sep 30 10:10:39 crc kubenswrapper[4730]: I0930 10:10:39.119161 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fca33d2f-4ffc-40bf-a02b-f5f757ca2d1f-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"fca33d2f-4ffc-40bf-a02b-f5f757ca2d1f\") " pod="openstack/nova-scheduler-0" Sep 30 10:10:39 crc kubenswrapper[4730]: I0930 10:10:39.220922 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fca33d2f-4ffc-40bf-a02b-f5f757ca2d1f-config-data\") pod \"nova-scheduler-0\" (UID: \"fca33d2f-4ffc-40bf-a02b-f5f757ca2d1f\") " pod="openstack/nova-scheduler-0" Sep 30 10:10:39 crc kubenswrapper[4730]: I0930 10:10:39.221068 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fca33d2f-4ffc-40bf-a02b-f5f757ca2d1f-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"fca33d2f-4ffc-40bf-a02b-f5f757ca2d1f\") " pod="openstack/nova-scheduler-0" Sep 30 10:10:39 crc kubenswrapper[4730]: I0930 10:10:39.221203 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-blkc2\" (UniqueName: \"kubernetes.io/projected/fca33d2f-4ffc-40bf-a02b-f5f757ca2d1f-kube-api-access-blkc2\") pod \"nova-scheduler-0\" (UID: \"fca33d2f-4ffc-40bf-a02b-f5f757ca2d1f\") " pod="openstack/nova-scheduler-0" Sep 30 10:10:39 crc kubenswrapper[4730]: I0930 10:10:39.231382 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/fca33d2f-4ffc-40bf-a02b-f5f757ca2d1f-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"fca33d2f-4ffc-40bf-a02b-f5f757ca2d1f\") " pod="openstack/nova-scheduler-0" Sep 30 10:10:39 crc kubenswrapper[4730]: I0930 10:10:39.231375 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fca33d2f-4ffc-40bf-a02b-f5f757ca2d1f-config-data\") pod \"nova-scheduler-0\" (UID: \"fca33d2f-4ffc-40bf-a02b-f5f757ca2d1f\") " pod="openstack/nova-scheduler-0" Sep 30 10:10:39 crc kubenswrapper[4730]: I0930 10:10:39.237552 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-blkc2\" (UniqueName: \"kubernetes.io/projected/fca33d2f-4ffc-40bf-a02b-f5f757ca2d1f-kube-api-access-blkc2\") pod \"nova-scheduler-0\" (UID: \"fca33d2f-4ffc-40bf-a02b-f5f757ca2d1f\") " pod="openstack/nova-scheduler-0" Sep 30 10:10:39 crc kubenswrapper[4730]: I0930 10:10:39.369598 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Sep 30 10:10:39 crc kubenswrapper[4730]: W0930 10:10:39.788028 4730 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfca33d2f_4ffc_40bf_a02b_f5f757ca2d1f.slice/crio-ff57a0b63ec68c6816b53d40a095c4b724afa71ce3ac469be18257d50672f05f WatchSource:0}: Error finding container ff57a0b63ec68c6816b53d40a095c4b724afa71ce3ac469be18257d50672f05f: Status 404 returned error can't find the container with id ff57a0b63ec68c6816b53d40a095c4b724afa71ce3ac469be18257d50672f05f Sep 30 10:10:39 crc kubenswrapper[4730]: I0930 10:10:39.796761 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 10:10:40 crc kubenswrapper[4730]: I0930 10:10:40.013939 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"0332adf1-8148-4923-9273-1ef8869dfad1","Type":"ContainerStarted","Data":"5df00ec65f7f62117d94fa73b6e5436087acc610e1d5dcdb2d4c6dc1cc1e414b"} Sep 30 10:10:40 crc kubenswrapper[4730]: I0930 10:10:40.014180 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"0332adf1-8148-4923-9273-1ef8869dfad1","Type":"ContainerStarted","Data":"7885c82ce9bb49ab5988a5e87c3b6147e726be8427f912e2d30dad3af01ff5eb"} Sep 30 10:10:40 crc kubenswrapper[4730]: I0930 10:10:40.014262 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"0332adf1-8148-4923-9273-1ef8869dfad1","Type":"ContainerStarted","Data":"54b8ffec5aeeb4027e9a0db3574c662ee225151f5efc5491b19a2a288772dd2d"} Sep 30 10:10:40 crc kubenswrapper[4730]: I0930 10:10:40.015135 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"a3c8895e-0bd8-4e06-a121-3afe3bcdf54f","Type":"ContainerStarted","Data":"1b5589167a624d73417e982764acace0ff2c7b8156b0f77db65c56be4266a5c4"} Sep 30 10:10:40 crc kubenswrapper[4730]: I0930 10:10:40.015157 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"a3c8895e-0bd8-4e06-a121-3afe3bcdf54f","Type":"ContainerStarted","Data":"b6bdf1647a44d32541ef840979f4257ba4105dbaa00263451a580f6b4bc653ce"} Sep 30 10:10:40 crc kubenswrapper[4730]: I0930 10:10:40.015165 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" 
event={"ID":"a3c8895e-0bd8-4e06-a121-3afe3bcdf54f","Type":"ContainerStarted","Data":"5f67a8331f003735f9c1ace748a9aff4be07b100960961792d5c31075b3e5ec1"} Sep 30 10:10:40 crc kubenswrapper[4730]: I0930 10:10:40.016914 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"fca33d2f-4ffc-40bf-a02b-f5f757ca2d1f","Type":"ContainerStarted","Data":"790d25c9e1dd9c9d4866859b19bb768f791d9ebf62dc3902d13475d26f574bc2"} Sep 30 10:10:40 crc kubenswrapper[4730]: I0930 10:10:40.016942 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"fca33d2f-4ffc-40bf-a02b-f5f757ca2d1f","Type":"ContainerStarted","Data":"ff57a0b63ec68c6816b53d40a095c4b724afa71ce3ac469be18257d50672f05f"} Sep 30 10:10:40 crc kubenswrapper[4730]: I0930 10:10:40.037801 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.037781742 podStartE2EDuration="2.037781742s" podCreationTimestamp="2025-09-30 10:10:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 10:10:40.030387208 +0000 UTC m=+1284.363647211" watchObservedRunningTime="2025-09-30 10:10:40.037781742 +0000 UTC m=+1284.371041735" Sep 30 10:10:40 crc kubenswrapper[4730]: I0930 10:10:40.054298 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=1.054282543 podStartE2EDuration="1.054282543s" podCreationTimestamp="2025-09-30 10:10:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 10:10:40.048529303 +0000 UTC m=+1284.381789306" watchObservedRunningTime="2025-09-30 10:10:40.054282543 +0000 UTC m=+1284.387542536" Sep 30 10:10:40 crc kubenswrapper[4730]: I0930 10:10:40.070207 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.07018772 podStartE2EDuration="2.07018772s" podCreationTimestamp="2025-09-30 10:10:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 10:10:40.063263709 +0000 UTC m=+1284.396523702" watchObservedRunningTime="2025-09-30 10:10:40.07018772 +0000 UTC m=+1284.403447713" Sep 30 10:10:40 crc kubenswrapper[4730]: I0930 10:10:40.420716 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8dd0a8ec-fce4-4b9c-8a10-e4ebf910c583" path="/var/lib/kubelet/pods/8dd0a8ec-fce4-4b9c-8a10-e4ebf910c583/volumes" Sep 30 10:10:43 crc kubenswrapper[4730]: I0930 10:10:43.491527 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Sep 30 10:10:43 crc kubenswrapper[4730]: I0930 10:10:43.492022 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Sep 30 10:10:44 crc kubenswrapper[4730]: I0930 10:10:44.370706 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Sep 30 10:10:48 crc kubenswrapper[4730]: I0930 10:10:48.491883 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Sep 30 10:10:48 crc kubenswrapper[4730]: I0930 10:10:48.492894 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Sep 30 10:10:48 crc kubenswrapper[4730]: I0930 
10:10:48.511672 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Sep 30 10:10:48 crc kubenswrapper[4730]: I0930 10:10:48.511762 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Sep 30 10:10:49 crc kubenswrapper[4730]: I0930 10:10:49.370255 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0"
Sep 30 10:10:49 crc kubenswrapper[4730]: I0930 10:10:49.407931 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0"
Sep 30 10:10:49 crc kubenswrapper[4730]: I0930 10:10:49.542758 4730 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="a3c8895e-0bd8-4e06-a121-3afe3bcdf54f" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.204:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)"
Sep 30 10:10:49 crc kubenswrapper[4730]: I0930 10:10:49.542779 4730 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="a3c8895e-0bd8-4e06-a121-3afe3bcdf54f" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.204:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)"
Sep 30 10:10:49 crc kubenswrapper[4730]: I0930 10:10:49.560952 4730 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="0332adf1-8148-4923-9273-1ef8869dfad1" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.205:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)"
Sep 30 10:10:49 crc kubenswrapper[4730]: I0930 10:10:49.560970 4730 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="0332adf1-8148-4923-9273-1ef8869dfad1" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.205:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)"
Sep 30 10:10:50 crc kubenswrapper[4730]: I0930 10:10:50.134871 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0"
Sep 30 10:10:55 crc kubenswrapper[4730]: I0930 10:10:55.216105 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0"
Sep 30 10:10:58 crc kubenswrapper[4730]: I0930 10:10:58.496655 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0"
Sep 30 10:10:58 crc kubenswrapper[4730]: I0930 10:10:58.500640 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0"
Sep 30 10:10:58 crc kubenswrapper[4730]: I0930 10:10:58.502343 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0"
Sep 30 10:10:58 crc kubenswrapper[4730]: I0930 10:10:58.528730 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0"
Sep 30 10:10:58 crc kubenswrapper[4730]: I0930 10:10:58.529066 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0"
Sep 30 10:10:58 crc kubenswrapper[4730]: I0930 10:10:58.529244 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0"
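The startup-probe failures above are HTTPS GETs against the freshly restarted nova endpoints that exceed the prober's client timeout; roughly ten seconds later the same probes report "started" and the readiness probes flip to "ready". A minimal sketch of one such timeout-bounded check, assuming a placeholder URL and timeout (kubelet's real prober additionally counts only 2xx/3xx responses as success, which the last line mimics):

package main

import (
	"crypto/tls"
	"fmt"
	"net/http"
	"time"
)

// probeOnce issues one timeout-bounded GET, the shape of check behind the
// prober.go entries above. A timeout or connection error is a probe failure.
func probeOnce(url string, timeout time.Duration) (ok bool, detail string) {
	client := &http.Client{
		Timeout: timeout,
		// The nova endpoints serve the TLS certs mounted earlier; skipping
		// verification keeps this sketch self-contained.
		Transport: &http.Transport{TLSClientConfig: &tls.Config{InsecureSkipVerify: true}},
	}
	resp, err := client.Get(url)
	if err != nil {
		// e.g. "net/http: request canceled (Client.Timeout exceeded while awaiting headers)"
		return false, err.Error()
	}
	defer resp.Body.Close()
	return resp.StatusCode >= 200 && resp.StatusCode < 400, resp.Status
}

func main() {
	ok, detail := probeOnce("https://10.217.0.205:8774/", time.Second)
	fmt.Printf("probeResult=%v output=%q\n", ok, detail)
}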
probe="readiness" status="ready" pod="openstack/nova-api-0" Sep 30 10:10:59 crc kubenswrapper[4730]: I0930 10:10:59.183699 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Sep 30 10:10:59 crc kubenswrapper[4730]: I0930 10:10:59.187762 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Sep 30 10:10:59 crc kubenswrapper[4730]: I0930 10:10:59.194383 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Sep 30 10:11:02 crc kubenswrapper[4730]: I0930 10:11:02.337123 4730 patch_prober.go:28] interesting pod/machine-config-daemon-d4zf9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 10:11:02 crc kubenswrapper[4730]: I0930 10:11:02.337494 4730 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 10:11:07 crc kubenswrapper[4730]: I0930 10:11:07.532529 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Sep 30 10:11:08 crc kubenswrapper[4730]: I0930 10:11:08.377305 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Sep 30 10:11:10 crc kubenswrapper[4730]: I0930 10:11:10.824376 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-server-0" podUID="6cf0ebea-06fc-47b2-a2c6-95605e023f94" containerName="rabbitmq" containerID="cri-o://d5c17e01b1b9d8c9200a2d1eacf6a349ed0515db67e3b0da61dfe2991655125e" gracePeriod=604797 Sep 30 10:11:11 crc kubenswrapper[4730]: I0930 10:11:11.467401 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-cell1-server-0" podUID="b1cbad29-68e1-42a9-af8f-ea1cfcb774a2" containerName="rabbitmq" containerID="cri-o://32968aaf1a40ad69e937f609110da6deb679b6cf3fb2a01b61fc8aab90a2316c" gracePeriod=604797 Sep 30 10:11:12 crc kubenswrapper[4730]: I0930 10:11:12.320495 4730 generic.go:334] "Generic (PLEG): container finished" podID="6cf0ebea-06fc-47b2-a2c6-95605e023f94" containerID="d5c17e01b1b9d8c9200a2d1eacf6a349ed0515db67e3b0da61dfe2991655125e" exitCode=0 Sep 30 10:11:12 crc kubenswrapper[4730]: I0930 10:11:12.320794 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"6cf0ebea-06fc-47b2-a2c6-95605e023f94","Type":"ContainerDied","Data":"d5c17e01b1b9d8c9200a2d1eacf6a349ed0515db67e3b0da61dfe2991655125e"} Sep 30 10:11:12 crc kubenswrapper[4730]: I0930 10:11:12.456805 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Sep 30 10:11:12 crc kubenswrapper[4730]: I0930 10:11:12.594387 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/6cf0ebea-06fc-47b2-a2c6-95605e023f94-rabbitmq-plugins\") pod \"6cf0ebea-06fc-47b2-a2c6-95605e023f94\" (UID: \"6cf0ebea-06fc-47b2-a2c6-95605e023f94\") " Sep 30 10:11:12 crc kubenswrapper[4730]: I0930 10:11:12.594468 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/6cf0ebea-06fc-47b2-a2c6-95605e023f94-rabbitmq-erlang-cookie\") pod \"6cf0ebea-06fc-47b2-a2c6-95605e023f94\" (UID: \"6cf0ebea-06fc-47b2-a2c6-95605e023f94\") " Sep 30 10:11:12 crc kubenswrapper[4730]: I0930 10:11:12.594524 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/6cf0ebea-06fc-47b2-a2c6-95605e023f94-pod-info\") pod \"6cf0ebea-06fc-47b2-a2c6-95605e023f94\" (UID: \"6cf0ebea-06fc-47b2-a2c6-95605e023f94\") " Sep 30 10:11:12 crc kubenswrapper[4730]: I0930 10:11:12.594553 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"6cf0ebea-06fc-47b2-a2c6-95605e023f94\" (UID: \"6cf0ebea-06fc-47b2-a2c6-95605e023f94\") " Sep 30 10:11:12 crc kubenswrapper[4730]: I0930 10:11:12.594630 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/6cf0ebea-06fc-47b2-a2c6-95605e023f94-plugins-conf\") pod \"6cf0ebea-06fc-47b2-a2c6-95605e023f94\" (UID: \"6cf0ebea-06fc-47b2-a2c6-95605e023f94\") " Sep 30 10:11:12 crc kubenswrapper[4730]: I0930 10:11:12.594768 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/6cf0ebea-06fc-47b2-a2c6-95605e023f94-server-conf\") pod \"6cf0ebea-06fc-47b2-a2c6-95605e023f94\" (UID: \"6cf0ebea-06fc-47b2-a2c6-95605e023f94\") " Sep 30 10:11:12 crc kubenswrapper[4730]: I0930 10:11:12.594868 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/6cf0ebea-06fc-47b2-a2c6-95605e023f94-rabbitmq-confd\") pod \"6cf0ebea-06fc-47b2-a2c6-95605e023f94\" (UID: \"6cf0ebea-06fc-47b2-a2c6-95605e023f94\") " Sep 30 10:11:12 crc kubenswrapper[4730]: I0930 10:11:12.594969 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gxbpr\" (UniqueName: \"kubernetes.io/projected/6cf0ebea-06fc-47b2-a2c6-95605e023f94-kube-api-access-gxbpr\") pod \"6cf0ebea-06fc-47b2-a2c6-95605e023f94\" (UID: \"6cf0ebea-06fc-47b2-a2c6-95605e023f94\") " Sep 30 10:11:12 crc kubenswrapper[4730]: I0930 10:11:12.595266 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/6cf0ebea-06fc-47b2-a2c6-95605e023f94-rabbitmq-tls\") pod \"6cf0ebea-06fc-47b2-a2c6-95605e023f94\" (UID: \"6cf0ebea-06fc-47b2-a2c6-95605e023f94\") " Sep 30 10:11:12 crc kubenswrapper[4730]: I0930 10:11:12.595304 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/6cf0ebea-06fc-47b2-a2c6-95605e023f94-config-data\") pod \"6cf0ebea-06fc-47b2-a2c6-95605e023f94\" (UID: 
\"6cf0ebea-06fc-47b2-a2c6-95605e023f94\") " Sep 30 10:11:12 crc kubenswrapper[4730]: I0930 10:11:12.595448 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/6cf0ebea-06fc-47b2-a2c6-95605e023f94-erlang-cookie-secret\") pod \"6cf0ebea-06fc-47b2-a2c6-95605e023f94\" (UID: \"6cf0ebea-06fc-47b2-a2c6-95605e023f94\") " Sep 30 10:11:12 crc kubenswrapper[4730]: I0930 10:11:12.595704 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6cf0ebea-06fc-47b2-a2c6-95605e023f94-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "6cf0ebea-06fc-47b2-a2c6-95605e023f94" (UID: "6cf0ebea-06fc-47b2-a2c6-95605e023f94"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 10:11:12 crc kubenswrapper[4730]: I0930 10:11:12.595825 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6cf0ebea-06fc-47b2-a2c6-95605e023f94-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "6cf0ebea-06fc-47b2-a2c6-95605e023f94" (UID: "6cf0ebea-06fc-47b2-a2c6-95605e023f94"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 10:11:12 crc kubenswrapper[4730]: I0930 10:11:12.596397 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6cf0ebea-06fc-47b2-a2c6-95605e023f94-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "6cf0ebea-06fc-47b2-a2c6-95605e023f94" (UID: "6cf0ebea-06fc-47b2-a2c6-95605e023f94"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 10:11:12 crc kubenswrapper[4730]: I0930 10:11:12.597246 4730 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/6cf0ebea-06fc-47b2-a2c6-95605e023f94-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Sep 30 10:11:12 crc kubenswrapper[4730]: I0930 10:11:12.597283 4730 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/6cf0ebea-06fc-47b2-a2c6-95605e023f94-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Sep 30 10:11:12 crc kubenswrapper[4730]: I0930 10:11:12.597299 4730 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/6cf0ebea-06fc-47b2-a2c6-95605e023f94-plugins-conf\") on node \"crc\" DevicePath \"\"" Sep 30 10:11:12 crc kubenswrapper[4730]: I0930 10:11:12.601554 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage10-crc" (OuterVolumeSpecName: "persistence") pod "6cf0ebea-06fc-47b2-a2c6-95605e023f94" (UID: "6cf0ebea-06fc-47b2-a2c6-95605e023f94"). InnerVolumeSpecName "local-storage10-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Sep 30 10:11:12 crc kubenswrapper[4730]: I0930 10:11:12.601578 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/6cf0ebea-06fc-47b2-a2c6-95605e023f94-pod-info" (OuterVolumeSpecName: "pod-info") pod "6cf0ebea-06fc-47b2-a2c6-95605e023f94" (UID: "6cf0ebea-06fc-47b2-a2c6-95605e023f94"). InnerVolumeSpecName "pod-info". 
PluginName "kubernetes.io/downward-api", VolumeGidValue "" Sep 30 10:11:12 crc kubenswrapper[4730]: I0930 10:11:12.601814 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6cf0ebea-06fc-47b2-a2c6-95605e023f94-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "6cf0ebea-06fc-47b2-a2c6-95605e023f94" (UID: "6cf0ebea-06fc-47b2-a2c6-95605e023f94"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:11:12 crc kubenswrapper[4730]: I0930 10:11:12.602880 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6cf0ebea-06fc-47b2-a2c6-95605e023f94-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "6cf0ebea-06fc-47b2-a2c6-95605e023f94" (UID: "6cf0ebea-06fc-47b2-a2c6-95605e023f94"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:11:12 crc kubenswrapper[4730]: I0930 10:11:12.607810 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6cf0ebea-06fc-47b2-a2c6-95605e023f94-kube-api-access-gxbpr" (OuterVolumeSpecName: "kube-api-access-gxbpr") pod "6cf0ebea-06fc-47b2-a2c6-95605e023f94" (UID: "6cf0ebea-06fc-47b2-a2c6-95605e023f94"). InnerVolumeSpecName "kube-api-access-gxbpr". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:11:12 crc kubenswrapper[4730]: I0930 10:11:12.622081 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6cf0ebea-06fc-47b2-a2c6-95605e023f94-config-data" (OuterVolumeSpecName: "config-data") pod "6cf0ebea-06fc-47b2-a2c6-95605e023f94" (UID: "6cf0ebea-06fc-47b2-a2c6-95605e023f94"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 10:11:12 crc kubenswrapper[4730]: I0930 10:11:12.658987 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6cf0ebea-06fc-47b2-a2c6-95605e023f94-server-conf" (OuterVolumeSpecName: "server-conf") pod "6cf0ebea-06fc-47b2-a2c6-95605e023f94" (UID: "6cf0ebea-06fc-47b2-a2c6-95605e023f94"). InnerVolumeSpecName "server-conf". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 10:11:12 crc kubenswrapper[4730]: I0930 10:11:12.699686 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gxbpr\" (UniqueName: \"kubernetes.io/projected/6cf0ebea-06fc-47b2-a2c6-95605e023f94-kube-api-access-gxbpr\") on node \"crc\" DevicePath \"\"" Sep 30 10:11:12 crc kubenswrapper[4730]: I0930 10:11:12.699724 4730 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/6cf0ebea-06fc-47b2-a2c6-95605e023f94-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Sep 30 10:11:12 crc kubenswrapper[4730]: I0930 10:11:12.699737 4730 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/6cf0ebea-06fc-47b2-a2c6-95605e023f94-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 10:11:12 crc kubenswrapper[4730]: I0930 10:11:12.699751 4730 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/6cf0ebea-06fc-47b2-a2c6-95605e023f94-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Sep 30 10:11:12 crc kubenswrapper[4730]: I0930 10:11:12.699761 4730 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/6cf0ebea-06fc-47b2-a2c6-95605e023f94-pod-info\") on node \"crc\" DevicePath \"\"" Sep 30 10:11:12 crc kubenswrapper[4730]: I0930 10:11:12.699790 4730 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" " Sep 30 10:11:12 crc kubenswrapper[4730]: I0930 10:11:12.699803 4730 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/6cf0ebea-06fc-47b2-a2c6-95605e023f94-server-conf\") on node \"crc\" DevicePath \"\"" Sep 30 10:11:12 crc kubenswrapper[4730]: I0930 10:11:12.727324 4730 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage10-crc" (UniqueName: "kubernetes.io/local-volume/local-storage10-crc") on node "crc" Sep 30 10:11:12 crc kubenswrapper[4730]: I0930 10:11:12.742745 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6cf0ebea-06fc-47b2-a2c6-95605e023f94-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "6cf0ebea-06fc-47b2-a2c6-95605e023f94" (UID: "6cf0ebea-06fc-47b2-a2c6-95605e023f94"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:11:12 crc kubenswrapper[4730]: I0930 10:11:12.802046 4730 reconciler_common.go:293] "Volume detached for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" DevicePath \"\"" Sep 30 10:11:12 crc kubenswrapper[4730]: I0930 10:11:12.802095 4730 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/6cf0ebea-06fc-47b2-a2c6-95605e023f94-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Sep 30 10:11:12 crc kubenswrapper[4730]: I0930 10:11:12.922892 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.006324 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/b1cbad29-68e1-42a9-af8f-ea1cfcb774a2-erlang-cookie-secret\") pod \"b1cbad29-68e1-42a9-af8f-ea1cfcb774a2\" (UID: \"b1cbad29-68e1-42a9-af8f-ea1cfcb774a2\") " Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.006384 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"b1cbad29-68e1-42a9-af8f-ea1cfcb774a2\" (UID: \"b1cbad29-68e1-42a9-af8f-ea1cfcb774a2\") " Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.006434 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/b1cbad29-68e1-42a9-af8f-ea1cfcb774a2-rabbitmq-erlang-cookie\") pod \"b1cbad29-68e1-42a9-af8f-ea1cfcb774a2\" (UID: \"b1cbad29-68e1-42a9-af8f-ea1cfcb774a2\") " Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.006475 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/b1cbad29-68e1-42a9-af8f-ea1cfcb774a2-rabbitmq-tls\") pod \"b1cbad29-68e1-42a9-af8f-ea1cfcb774a2\" (UID: \"b1cbad29-68e1-42a9-af8f-ea1cfcb774a2\") " Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.006517 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b1cbad29-68e1-42a9-af8f-ea1cfcb774a2-config-data\") pod \"b1cbad29-68e1-42a9-af8f-ea1cfcb774a2\" (UID: \"b1cbad29-68e1-42a9-af8f-ea1cfcb774a2\") " Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.006596 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/b1cbad29-68e1-42a9-af8f-ea1cfcb774a2-pod-info\") pod \"b1cbad29-68e1-42a9-af8f-ea1cfcb774a2\" (UID: \"b1cbad29-68e1-42a9-af8f-ea1cfcb774a2\") " Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.006656 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/b1cbad29-68e1-42a9-af8f-ea1cfcb774a2-plugins-conf\") pod \"b1cbad29-68e1-42a9-af8f-ea1cfcb774a2\" (UID: \"b1cbad29-68e1-42a9-af8f-ea1cfcb774a2\") " Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.006688 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/b1cbad29-68e1-42a9-af8f-ea1cfcb774a2-server-conf\") pod \"b1cbad29-68e1-42a9-af8f-ea1cfcb774a2\" (UID: \"b1cbad29-68e1-42a9-af8f-ea1cfcb774a2\") " Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.006720 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/b1cbad29-68e1-42a9-af8f-ea1cfcb774a2-rabbitmq-confd\") pod \"b1cbad29-68e1-42a9-af8f-ea1cfcb774a2\" (UID: \"b1cbad29-68e1-42a9-af8f-ea1cfcb774a2\") " Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.006800 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hvmtd\" (UniqueName: \"kubernetes.io/projected/b1cbad29-68e1-42a9-af8f-ea1cfcb774a2-kube-api-access-hvmtd\") pod \"b1cbad29-68e1-42a9-af8f-ea1cfcb774a2\" (UID: 
\"b1cbad29-68e1-42a9-af8f-ea1cfcb774a2\") " Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.006894 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/b1cbad29-68e1-42a9-af8f-ea1cfcb774a2-rabbitmq-plugins\") pod \"b1cbad29-68e1-42a9-af8f-ea1cfcb774a2\" (UID: \"b1cbad29-68e1-42a9-af8f-ea1cfcb774a2\") " Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.007443 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b1cbad29-68e1-42a9-af8f-ea1cfcb774a2-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "b1cbad29-68e1-42a9-af8f-ea1cfcb774a2" (UID: "b1cbad29-68e1-42a9-af8f-ea1cfcb774a2"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.007473 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b1cbad29-68e1-42a9-af8f-ea1cfcb774a2-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "b1cbad29-68e1-42a9-af8f-ea1cfcb774a2" (UID: "b1cbad29-68e1-42a9-af8f-ea1cfcb774a2"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.007744 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b1cbad29-68e1-42a9-af8f-ea1cfcb774a2-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "b1cbad29-68e1-42a9-af8f-ea1cfcb774a2" (UID: "b1cbad29-68e1-42a9-af8f-ea1cfcb774a2"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.011625 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b1cbad29-68e1-42a9-af8f-ea1cfcb774a2-kube-api-access-hvmtd" (OuterVolumeSpecName: "kube-api-access-hvmtd") pod "b1cbad29-68e1-42a9-af8f-ea1cfcb774a2" (UID: "b1cbad29-68e1-42a9-af8f-ea1cfcb774a2"). InnerVolumeSpecName "kube-api-access-hvmtd". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.012206 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b1cbad29-68e1-42a9-af8f-ea1cfcb774a2-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "b1cbad29-68e1-42a9-af8f-ea1cfcb774a2" (UID: "b1cbad29-68e1-42a9-af8f-ea1cfcb774a2"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.016950 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/b1cbad29-68e1-42a9-af8f-ea1cfcb774a2-pod-info" (OuterVolumeSpecName: "pod-info") pod "b1cbad29-68e1-42a9-af8f-ea1cfcb774a2" (UID: "b1cbad29-68e1-42a9-af8f-ea1cfcb774a2"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.016958 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage02-crc" (OuterVolumeSpecName: "persistence") pod "b1cbad29-68e1-42a9-af8f-ea1cfcb774a2" (UID: "b1cbad29-68e1-42a9-af8f-ea1cfcb774a2"). InnerVolumeSpecName "local-storage02-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.017070 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b1cbad29-68e1-42a9-af8f-ea1cfcb774a2-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "b1cbad29-68e1-42a9-af8f-ea1cfcb774a2" (UID: "b1cbad29-68e1-42a9-af8f-ea1cfcb774a2"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.031857 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b1cbad29-68e1-42a9-af8f-ea1cfcb774a2-config-data" (OuterVolumeSpecName: "config-data") pod "b1cbad29-68e1-42a9-af8f-ea1cfcb774a2" (UID: "b1cbad29-68e1-42a9-af8f-ea1cfcb774a2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.071376 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b1cbad29-68e1-42a9-af8f-ea1cfcb774a2-server-conf" (OuterVolumeSpecName: "server-conf") pod "b1cbad29-68e1-42a9-af8f-ea1cfcb774a2" (UID: "b1cbad29-68e1-42a9-af8f-ea1cfcb774a2"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.109252 4730 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/b1cbad29-68e1-42a9-af8f-ea1cfcb774a2-pod-info\") on node \"crc\" DevicePath \"\"" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.109282 4730 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/b1cbad29-68e1-42a9-af8f-ea1cfcb774a2-plugins-conf\") on node \"crc\" DevicePath \"\"" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.109291 4730 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/b1cbad29-68e1-42a9-af8f-ea1cfcb774a2-server-conf\") on node \"crc\" DevicePath \"\"" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.109300 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hvmtd\" (UniqueName: \"kubernetes.io/projected/b1cbad29-68e1-42a9-af8f-ea1cfcb774a2-kube-api-access-hvmtd\") on node \"crc\" DevicePath \"\"" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.109309 4730 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/b1cbad29-68e1-42a9-af8f-ea1cfcb774a2-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.109317 4730 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/b1cbad29-68e1-42a9-af8f-ea1cfcb774a2-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.109346 4730 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" " Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.109355 4730 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/b1cbad29-68e1-42a9-af8f-ea1cfcb774a2-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Sep 30 10:11:13 crc kubenswrapper[4730]: 
I0930 10:11:13.109363 4730 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/b1cbad29-68e1-42a9-af8f-ea1cfcb774a2-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.109371 4730 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b1cbad29-68e1-42a9-af8f-ea1cfcb774a2-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.127128 4730 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage02-crc" (UniqueName: "kubernetes.io/local-volume/local-storage02-crc") on node "crc" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.131763 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b1cbad29-68e1-42a9-af8f-ea1cfcb774a2-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "b1cbad29-68e1-42a9-af8f-ea1cfcb774a2" (UID: "b1cbad29-68e1-42a9-af8f-ea1cfcb774a2"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.210825 4730 reconciler_common.go:293] "Volume detached for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" DevicePath \"\"" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.210862 4730 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/b1cbad29-68e1-42a9-af8f-ea1cfcb774a2-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.333241 4730 generic.go:334] "Generic (PLEG): container finished" podID="b1cbad29-68e1-42a9-af8f-ea1cfcb774a2" containerID="32968aaf1a40ad69e937f609110da6deb679b6cf3fb2a01b61fc8aab90a2316c" exitCode=0 Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.333293 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"b1cbad29-68e1-42a9-af8f-ea1cfcb774a2","Type":"ContainerDied","Data":"32968aaf1a40ad69e937f609110da6deb679b6cf3fb2a01b61fc8aab90a2316c"} Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.333324 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.333348 4730 scope.go:117] "RemoveContainer" containerID="32968aaf1a40ad69e937f609110da6deb679b6cf3fb2a01b61fc8aab90a2316c" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.333334 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"b1cbad29-68e1-42a9-af8f-ea1cfcb774a2","Type":"ContainerDied","Data":"fc1a13fe9fca3e3222438c2d55d6157e75f25172e865cc42ed30d2d1ee6f6aea"} Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.339593 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"6cf0ebea-06fc-47b2-a2c6-95605e023f94","Type":"ContainerDied","Data":"35dc2cf71bef2e853e09a15d73b9ba802d9331ebf376bbb6c4e6993a21e146fd"} Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.339668 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.396739 4730 scope.go:117] "RemoveContainer" containerID="8dafeeba9b157ce2800566bddda816b3d119847d03a7d6c84aff95e1c288798c" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.400875 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.430200 4730 scope.go:117] "RemoveContainer" containerID="32968aaf1a40ad69e937f609110da6deb679b6cf3fb2a01b61fc8aab90a2316c" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.430322 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Sep 30 10:11:13 crc kubenswrapper[4730]: E0930 10:11:13.432069 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"32968aaf1a40ad69e937f609110da6deb679b6cf3fb2a01b61fc8aab90a2316c\": container with ID starting with 32968aaf1a40ad69e937f609110da6deb679b6cf3fb2a01b61fc8aab90a2316c not found: ID does not exist" containerID="32968aaf1a40ad69e937f609110da6deb679b6cf3fb2a01b61fc8aab90a2316c" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.432109 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"32968aaf1a40ad69e937f609110da6deb679b6cf3fb2a01b61fc8aab90a2316c"} err="failed to get container status \"32968aaf1a40ad69e937f609110da6deb679b6cf3fb2a01b61fc8aab90a2316c\": rpc error: code = NotFound desc = could not find container \"32968aaf1a40ad69e937f609110da6deb679b6cf3fb2a01b61fc8aab90a2316c\": container with ID starting with 32968aaf1a40ad69e937f609110da6deb679b6cf3fb2a01b61fc8aab90a2316c not found: ID does not exist" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.432133 4730 scope.go:117] "RemoveContainer" containerID="8dafeeba9b157ce2800566bddda816b3d119847d03a7d6c84aff95e1c288798c" Sep 30 10:11:13 crc kubenswrapper[4730]: E0930 10:11:13.435734 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8dafeeba9b157ce2800566bddda816b3d119847d03a7d6c84aff95e1c288798c\": container with ID starting with 8dafeeba9b157ce2800566bddda816b3d119847d03a7d6c84aff95e1c288798c not found: ID does not exist" containerID="8dafeeba9b157ce2800566bddda816b3d119847d03a7d6c84aff95e1c288798c" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.435772 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8dafeeba9b157ce2800566bddda816b3d119847d03a7d6c84aff95e1c288798c"} err="failed to get container status \"8dafeeba9b157ce2800566bddda816b3d119847d03a7d6c84aff95e1c288798c\": rpc error: code = NotFound desc = could not find container \"8dafeeba9b157ce2800566bddda816b3d119847d03a7d6c84aff95e1c288798c\": container with ID starting with 8dafeeba9b157ce2800566bddda816b3d119847d03a7d6c84aff95e1c288798c not found: ID does not exist" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.435795 4730 scope.go:117] "RemoveContainer" containerID="d5c17e01b1b9d8c9200a2d1eacf6a349ed0515db67e3b0da61dfe2991655125e" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.443256 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.455289 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Sep 30 10:11:13 crc 
kubenswrapper[4730]: E0930 10:11:13.455871 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6cf0ebea-06fc-47b2-a2c6-95605e023f94" containerName="rabbitmq" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.455889 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="6cf0ebea-06fc-47b2-a2c6-95605e023f94" containerName="rabbitmq" Sep 30 10:11:13 crc kubenswrapper[4730]: E0930 10:11:13.455906 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b1cbad29-68e1-42a9-af8f-ea1cfcb774a2" containerName="setup-container" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.455912 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="b1cbad29-68e1-42a9-af8f-ea1cfcb774a2" containerName="setup-container" Sep 30 10:11:13 crc kubenswrapper[4730]: E0930 10:11:13.455919 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6cf0ebea-06fc-47b2-a2c6-95605e023f94" containerName="setup-container" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.455926 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="6cf0ebea-06fc-47b2-a2c6-95605e023f94" containerName="setup-container" Sep 30 10:11:13 crc kubenswrapper[4730]: E0930 10:11:13.455936 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b1cbad29-68e1-42a9-af8f-ea1cfcb774a2" containerName="rabbitmq" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.455943 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="b1cbad29-68e1-42a9-af8f-ea1cfcb774a2" containerName="rabbitmq" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.456158 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="6cf0ebea-06fc-47b2-a2c6-95605e023f94" containerName="rabbitmq" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.456187 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="b1cbad29-68e1-42a9-af8f-ea1cfcb774a2" containerName="rabbitmq" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.457208 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.461803 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.461985 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.462016 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-vpw8r" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.462211 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.462243 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.462343 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.462367 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.470282 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-server-0"] Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.481226 4730 scope.go:117] "RemoveContainer" containerID="587710b1ff8f9bce76f7bf39a64d961cf638fbe04d60ee8225504cae9015fcfb" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.491441 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.511485 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.513510 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.516912 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.517248 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.517542 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-cbldm" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.520001 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.521232 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9hjfs\" (UniqueName: \"kubernetes.io/projected/a3b79a67-6ca7-44cd-8108-9afb64437809-kube-api-access-9hjfs\") pod \"rabbitmq-cell1-server-0\" (UID: \"a3b79a67-6ca7-44cd-8108-9afb64437809\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.521354 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/a3b79a67-6ca7-44cd-8108-9afb64437809-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"a3b79a67-6ca7-44cd-8108-9afb64437809\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.521392 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/a3b79a67-6ca7-44cd-8108-9afb64437809-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"a3b79a67-6ca7-44cd-8108-9afb64437809\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.521425 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/a3b79a67-6ca7-44cd-8108-9afb64437809-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"a3b79a67-6ca7-44cd-8108-9afb64437809\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.521445 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/a3b79a67-6ca7-44cd-8108-9afb64437809-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"a3b79a67-6ca7-44cd-8108-9afb64437809\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.521473 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/a3b79a67-6ca7-44cd-8108-9afb64437809-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"a3b79a67-6ca7-44cd-8108-9afb64437809\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.521533 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/a3b79a67-6ca7-44cd-8108-9afb64437809-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"a3b79a67-6ca7-44cd-8108-9afb64437809\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 10:11:13 crc 
kubenswrapper[4730]: I0930 10:11:13.521555 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/a3b79a67-6ca7-44cd-8108-9afb64437809-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"a3b79a67-6ca7-44cd-8108-9afb64437809\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.521603 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/a3b79a67-6ca7-44cd-8108-9afb64437809-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"a3b79a67-6ca7-44cd-8108-9afb64437809\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.521640 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"a3b79a67-6ca7-44cd-8108-9afb64437809\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.521666 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a3b79a67-6ca7-44cd-8108-9afb64437809-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"a3b79a67-6ca7-44cd-8108-9afb64437809\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.522014 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.522257 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.522268 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.528285 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.623675 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/a1a78aec-c35b-41c6-a1e0-43fba77e84fd-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"a1a78aec-c35b-41c6-a1e0-43fba77e84fd\") " pod="openstack/rabbitmq-server-0" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.623741 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/a3b79a67-6ca7-44cd-8108-9afb64437809-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"a3b79a67-6ca7-44cd-8108-9afb64437809\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.623762 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/a1a78aec-c35b-41c6-a1e0-43fba77e84fd-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"a1a78aec-c35b-41c6-a1e0-43fba77e84fd\") " pod="openstack/rabbitmq-server-0" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.623779 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: 
\"kubernetes.io/configmap/a3b79a67-6ca7-44cd-8108-9afb64437809-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"a3b79a67-6ca7-44cd-8108-9afb64437809\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.623801 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/a1a78aec-c35b-41c6-a1e0-43fba77e84fd-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"a1a78aec-c35b-41c6-a1e0-43fba77e84fd\") " pod="openstack/rabbitmq-server-0" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.623819 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/a1a78aec-c35b-41c6-a1e0-43fba77e84fd-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"a1a78aec-c35b-41c6-a1e0-43fba77e84fd\") " pod="openstack/rabbitmq-server-0" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.623838 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/a3b79a67-6ca7-44cd-8108-9afb64437809-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"a3b79a67-6ca7-44cd-8108-9afb64437809\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.623855 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/a3b79a67-6ca7-44cd-8108-9afb64437809-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"a3b79a67-6ca7-44cd-8108-9afb64437809\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.623873 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/a1a78aec-c35b-41c6-a1e0-43fba77e84fd-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"a1a78aec-c35b-41c6-a1e0-43fba77e84fd\") " pod="openstack/rabbitmq-server-0" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.623890 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/a3b79a67-6ca7-44cd-8108-9afb64437809-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"a3b79a67-6ca7-44cd-8108-9afb64437809\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.623915 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bhqmj\" (UniqueName: \"kubernetes.io/projected/a1a78aec-c35b-41c6-a1e0-43fba77e84fd-kube-api-access-bhqmj\") pod \"rabbitmq-server-0\" (UID: \"a1a78aec-c35b-41c6-a1e0-43fba77e84fd\") " pod="openstack/rabbitmq-server-0" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.623945 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/a1a78aec-c35b-41c6-a1e0-43fba77e84fd-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"a1a78aec-c35b-41c6-a1e0-43fba77e84fd\") " pod="openstack/rabbitmq-server-0" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.623980 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage10-crc\") pod \"rabbitmq-server-0\" (UID: \"a1a78aec-c35b-41c6-a1e0-43fba77e84fd\") " pod="openstack/rabbitmq-server-0" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.624002 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/a3b79a67-6ca7-44cd-8108-9afb64437809-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"a3b79a67-6ca7-44cd-8108-9afb64437809\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.624024 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/a3b79a67-6ca7-44cd-8108-9afb64437809-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"a3b79a67-6ca7-44cd-8108-9afb64437809\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.624063 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/a3b79a67-6ca7-44cd-8108-9afb64437809-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"a3b79a67-6ca7-44cd-8108-9afb64437809\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.624082 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"a3b79a67-6ca7-44cd-8108-9afb64437809\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.624102 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a3b79a67-6ca7-44cd-8108-9afb64437809-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"a3b79a67-6ca7-44cd-8108-9afb64437809\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.624132 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a1a78aec-c35b-41c6-a1e0-43fba77e84fd-config-data\") pod \"rabbitmq-server-0\" (UID: \"a1a78aec-c35b-41c6-a1e0-43fba77e84fd\") " pod="openstack/rabbitmq-server-0" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.624176 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9hjfs\" (UniqueName: \"kubernetes.io/projected/a3b79a67-6ca7-44cd-8108-9afb64437809-kube-api-access-9hjfs\") pod \"rabbitmq-cell1-server-0\" (UID: \"a3b79a67-6ca7-44cd-8108-9afb64437809\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.624194 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/a1a78aec-c35b-41c6-a1e0-43fba77e84fd-server-conf\") pod \"rabbitmq-server-0\" (UID: \"a1a78aec-c35b-41c6-a1e0-43fba77e84fd\") " pod="openstack/rabbitmq-server-0" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.624213 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/a1a78aec-c35b-41c6-a1e0-43fba77e84fd-pod-info\") pod \"rabbitmq-server-0\" (UID: \"a1a78aec-c35b-41c6-a1e0-43fba77e84fd\") " 
pod="openstack/rabbitmq-server-0" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.625018 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/a3b79a67-6ca7-44cd-8108-9afb64437809-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"a3b79a67-6ca7-44cd-8108-9afb64437809\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.625428 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/a3b79a67-6ca7-44cd-8108-9afb64437809-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"a3b79a67-6ca7-44cd-8108-9afb64437809\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.625477 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/a3b79a67-6ca7-44cd-8108-9afb64437809-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"a3b79a67-6ca7-44cd-8108-9afb64437809\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.626067 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/a3b79a67-6ca7-44cd-8108-9afb64437809-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"a3b79a67-6ca7-44cd-8108-9afb64437809\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.626443 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a3b79a67-6ca7-44cd-8108-9afb64437809-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"a3b79a67-6ca7-44cd-8108-9afb64437809\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.627223 4730 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"a3b79a67-6ca7-44cd-8108-9afb64437809\") device mount path \"/mnt/openstack/pv02\"" pod="openstack/rabbitmq-cell1-server-0" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.629820 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/a3b79a67-6ca7-44cd-8108-9afb64437809-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"a3b79a67-6ca7-44cd-8108-9afb64437809\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.629820 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/a3b79a67-6ca7-44cd-8108-9afb64437809-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"a3b79a67-6ca7-44cd-8108-9afb64437809\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.631266 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/a3b79a67-6ca7-44cd-8108-9afb64437809-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"a3b79a67-6ca7-44cd-8108-9afb64437809\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.634173 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" 
(UniqueName: \"kubernetes.io/downward-api/a3b79a67-6ca7-44cd-8108-9afb64437809-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"a3b79a67-6ca7-44cd-8108-9afb64437809\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.641445 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9hjfs\" (UniqueName: \"kubernetes.io/projected/a3b79a67-6ca7-44cd-8108-9afb64437809-kube-api-access-9hjfs\") pod \"rabbitmq-cell1-server-0\" (UID: \"a3b79a67-6ca7-44cd-8108-9afb64437809\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.659855 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"a3b79a67-6ca7-44cd-8108-9afb64437809\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.725790 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/a1a78aec-c35b-41c6-a1e0-43fba77e84fd-server-conf\") pod \"rabbitmq-server-0\" (UID: \"a1a78aec-c35b-41c6-a1e0-43fba77e84fd\") " pod="openstack/rabbitmq-server-0" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.726030 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/a1a78aec-c35b-41c6-a1e0-43fba77e84fd-pod-info\") pod \"rabbitmq-server-0\" (UID: \"a1a78aec-c35b-41c6-a1e0-43fba77e84fd\") " pod="openstack/rabbitmq-server-0" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.726168 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/a1a78aec-c35b-41c6-a1e0-43fba77e84fd-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"a1a78aec-c35b-41c6-a1e0-43fba77e84fd\") " pod="openstack/rabbitmq-server-0" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.726286 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/a1a78aec-c35b-41c6-a1e0-43fba77e84fd-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"a1a78aec-c35b-41c6-a1e0-43fba77e84fd\") " pod="openstack/rabbitmq-server-0" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.726400 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/a1a78aec-c35b-41c6-a1e0-43fba77e84fd-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"a1a78aec-c35b-41c6-a1e0-43fba77e84fd\") " pod="openstack/rabbitmq-server-0" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.726985 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/a1a78aec-c35b-41c6-a1e0-43fba77e84fd-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"a1a78aec-c35b-41c6-a1e0-43fba77e84fd\") " pod="openstack/rabbitmq-server-0" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.727432 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/a1a78aec-c35b-41c6-a1e0-43fba77e84fd-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"a1a78aec-c35b-41c6-a1e0-43fba77e84fd\") " pod="openstack/rabbitmq-server-0" Sep 30 10:11:13 crc 
kubenswrapper[4730]: I0930 10:11:13.726939 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/a1a78aec-c35b-41c6-a1e0-43fba77e84fd-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"a1a78aec-c35b-41c6-a1e0-43fba77e84fd\") " pod="openstack/rabbitmq-server-0" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.727338 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/a1a78aec-c35b-41c6-a1e0-43fba77e84fd-server-conf\") pod \"rabbitmq-server-0\" (UID: \"a1a78aec-c35b-41c6-a1e0-43fba77e84fd\") " pod="openstack/rabbitmq-server-0" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.726734 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/a1a78aec-c35b-41c6-a1e0-43fba77e84fd-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"a1a78aec-c35b-41c6-a1e0-43fba77e84fd\") " pod="openstack/rabbitmq-server-0" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.727834 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bhqmj\" (UniqueName: \"kubernetes.io/projected/a1a78aec-c35b-41c6-a1e0-43fba77e84fd-kube-api-access-bhqmj\") pod \"rabbitmq-server-0\" (UID: \"a1a78aec-c35b-41c6-a1e0-43fba77e84fd\") " pod="openstack/rabbitmq-server-0" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.727951 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/a1a78aec-c35b-41c6-a1e0-43fba77e84fd-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"a1a78aec-c35b-41c6-a1e0-43fba77e84fd\") " pod="openstack/rabbitmq-server-0" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.728061 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"rabbitmq-server-0\" (UID: \"a1a78aec-c35b-41c6-a1e0-43fba77e84fd\") " pod="openstack/rabbitmq-server-0" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.728132 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/a1a78aec-c35b-41c6-a1e0-43fba77e84fd-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"a1a78aec-c35b-41c6-a1e0-43fba77e84fd\") " pod="openstack/rabbitmq-server-0" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.728303 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a1a78aec-c35b-41c6-a1e0-43fba77e84fd-config-data\") pod \"rabbitmq-server-0\" (UID: \"a1a78aec-c35b-41c6-a1e0-43fba77e84fd\") " pod="openstack/rabbitmq-server-0" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.728913 4730 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"rabbitmq-server-0\" (UID: \"a1a78aec-c35b-41c6-a1e0-43fba77e84fd\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/rabbitmq-server-0" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.729277 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a1a78aec-c35b-41c6-a1e0-43fba77e84fd-config-data\") pod \"rabbitmq-server-0\" (UID: 
\"a1a78aec-c35b-41c6-a1e0-43fba77e84fd\") " pod="openstack/rabbitmq-server-0" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.733280 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/a1a78aec-c35b-41c6-a1e0-43fba77e84fd-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"a1a78aec-c35b-41c6-a1e0-43fba77e84fd\") " pod="openstack/rabbitmq-server-0" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.738384 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/a1a78aec-c35b-41c6-a1e0-43fba77e84fd-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"a1a78aec-c35b-41c6-a1e0-43fba77e84fd\") " pod="openstack/rabbitmq-server-0" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.738426 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/a1a78aec-c35b-41c6-a1e0-43fba77e84fd-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"a1a78aec-c35b-41c6-a1e0-43fba77e84fd\") " pod="openstack/rabbitmq-server-0" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.748197 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/a1a78aec-c35b-41c6-a1e0-43fba77e84fd-pod-info\") pod \"rabbitmq-server-0\" (UID: \"a1a78aec-c35b-41c6-a1e0-43fba77e84fd\") " pod="openstack/rabbitmq-server-0" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.751145 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bhqmj\" (UniqueName: \"kubernetes.io/projected/a1a78aec-c35b-41c6-a1e0-43fba77e84fd-kube-api-access-bhqmj\") pod \"rabbitmq-server-0\" (UID: \"a1a78aec-c35b-41c6-a1e0-43fba77e84fd\") " pod="openstack/rabbitmq-server-0" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.773657 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"rabbitmq-server-0\" (UID: \"a1a78aec-c35b-41c6-a1e0-43fba77e84fd\") " pod="openstack/rabbitmq-server-0" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.783022 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Sep 30 10:11:13 crc kubenswrapper[4730]: I0930 10:11:13.854865 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Sep 30 10:11:14 crc kubenswrapper[4730]: I0930 10:11:14.222659 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Sep 30 10:11:14 crc kubenswrapper[4730]: I0930 10:11:14.318942 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Sep 30 10:11:14 crc kubenswrapper[4730]: W0930 10:11:14.320723 4730 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda1a78aec_c35b_41c6_a1e0_43fba77e84fd.slice/crio-2bf6e19335ebd375c4a268352a8ae8638b2ba79cb7c0df26b360b6c0bb9c4822 WatchSource:0}: Error finding container 2bf6e19335ebd375c4a268352a8ae8638b2ba79cb7c0df26b360b6c0bb9c4822: Status 404 returned error can't find the container with id 2bf6e19335ebd375c4a268352a8ae8638b2ba79cb7c0df26b360b6c0bb9c4822 Sep 30 10:11:14 crc kubenswrapper[4730]: I0930 10:11:14.356087 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"a3b79a67-6ca7-44cd-8108-9afb64437809","Type":"ContainerStarted","Data":"25476de29d2465cb7d4eba435b7af46665d89d4bcd65963762bd36773943337a"} Sep 30 10:11:14 crc kubenswrapper[4730]: I0930 10:11:14.357456 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"a1a78aec-c35b-41c6-a1e0-43fba77e84fd","Type":"ContainerStarted","Data":"2bf6e19335ebd375c4a268352a8ae8638b2ba79cb7c0df26b360b6c0bb9c4822"} Sep 30 10:11:14 crc kubenswrapper[4730]: I0930 10:11:14.399248 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6cf0ebea-06fc-47b2-a2c6-95605e023f94" path="/var/lib/kubelet/pods/6cf0ebea-06fc-47b2-a2c6-95605e023f94/volumes" Sep 30 10:11:14 crc kubenswrapper[4730]: I0930 10:11:14.400569 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b1cbad29-68e1-42a9-af8f-ea1cfcb774a2" path="/var/lib/kubelet/pods/b1cbad29-68e1-42a9-af8f-ea1cfcb774a2/volumes" Sep 30 10:11:15 crc kubenswrapper[4730]: I0930 10:11:15.372146 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"a3b79a67-6ca7-44cd-8108-9afb64437809","Type":"ContainerStarted","Data":"a678affb68479ac2cacb8184e40d3956918760bfa336bc83665f28bfcc2a3d4d"} Sep 30 10:11:15 crc kubenswrapper[4730]: I0930 10:11:15.374047 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"a1a78aec-c35b-41c6-a1e0-43fba77e84fd","Type":"ContainerStarted","Data":"ad6fde4f023a3596826798f14db6bd29f9d21331f98ef9a18564e2d0997ae1dd"} Sep 30 10:11:22 crc kubenswrapper[4730]: I0930 10:11:22.063234 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-598cd8bf55-7dpb6"] Sep 30 10:11:22 crc kubenswrapper[4730]: I0930 10:11:22.065490 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-598cd8bf55-7dpb6" Sep 30 10:11:22 crc kubenswrapper[4730]: I0930 10:11:22.068038 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-edpm-ipam" Sep 30 10:11:22 crc kubenswrapper[4730]: I0930 10:11:22.074789 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-598cd8bf55-7dpb6"] Sep 30 10:11:22 crc kubenswrapper[4730]: I0930 10:11:22.121693 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e888152b-fd89-49d3-934a-7586b0fabf19-ovsdbserver-sb\") pod \"dnsmasq-dns-598cd8bf55-7dpb6\" (UID: \"e888152b-fd89-49d3-934a-7586b0fabf19\") " pod="openstack/dnsmasq-dns-598cd8bf55-7dpb6" Sep 30 10:11:22 crc kubenswrapper[4730]: I0930 10:11:22.121829 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bwpxn\" (UniqueName: \"kubernetes.io/projected/e888152b-fd89-49d3-934a-7586b0fabf19-kube-api-access-bwpxn\") pod \"dnsmasq-dns-598cd8bf55-7dpb6\" (UID: \"e888152b-fd89-49d3-934a-7586b0fabf19\") " pod="openstack/dnsmasq-dns-598cd8bf55-7dpb6" Sep 30 10:11:22 crc kubenswrapper[4730]: I0930 10:11:22.122070 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e888152b-fd89-49d3-934a-7586b0fabf19-ovsdbserver-nb\") pod \"dnsmasq-dns-598cd8bf55-7dpb6\" (UID: \"e888152b-fd89-49d3-934a-7586b0fabf19\") " pod="openstack/dnsmasq-dns-598cd8bf55-7dpb6" Sep 30 10:11:22 crc kubenswrapper[4730]: I0930 10:11:22.122267 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e888152b-fd89-49d3-934a-7586b0fabf19-config\") pod \"dnsmasq-dns-598cd8bf55-7dpb6\" (UID: \"e888152b-fd89-49d3-934a-7586b0fabf19\") " pod="openstack/dnsmasq-dns-598cd8bf55-7dpb6" Sep 30 10:11:22 crc kubenswrapper[4730]: I0930 10:11:22.122381 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e888152b-fd89-49d3-934a-7586b0fabf19-dns-svc\") pod \"dnsmasq-dns-598cd8bf55-7dpb6\" (UID: \"e888152b-fd89-49d3-934a-7586b0fabf19\") " pod="openstack/dnsmasq-dns-598cd8bf55-7dpb6" Sep 30 10:11:22 crc kubenswrapper[4730]: I0930 10:11:22.122466 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/e888152b-fd89-49d3-934a-7586b0fabf19-openstack-edpm-ipam\") pod \"dnsmasq-dns-598cd8bf55-7dpb6\" (UID: \"e888152b-fd89-49d3-934a-7586b0fabf19\") " pod="openstack/dnsmasq-dns-598cd8bf55-7dpb6" Sep 30 10:11:22 crc kubenswrapper[4730]: I0930 10:11:22.224142 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e888152b-fd89-49d3-934a-7586b0fabf19-config\") pod \"dnsmasq-dns-598cd8bf55-7dpb6\" (UID: \"e888152b-fd89-49d3-934a-7586b0fabf19\") " pod="openstack/dnsmasq-dns-598cd8bf55-7dpb6" Sep 30 10:11:22 crc kubenswrapper[4730]: I0930 10:11:22.224216 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e888152b-fd89-49d3-934a-7586b0fabf19-dns-svc\") pod \"dnsmasq-dns-598cd8bf55-7dpb6\" (UID: \"e888152b-fd89-49d3-934a-7586b0fabf19\") " 
pod="openstack/dnsmasq-dns-598cd8bf55-7dpb6" Sep 30 10:11:22 crc kubenswrapper[4730]: I0930 10:11:22.224267 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/e888152b-fd89-49d3-934a-7586b0fabf19-openstack-edpm-ipam\") pod \"dnsmasq-dns-598cd8bf55-7dpb6\" (UID: \"e888152b-fd89-49d3-934a-7586b0fabf19\") " pod="openstack/dnsmasq-dns-598cd8bf55-7dpb6" Sep 30 10:11:22 crc kubenswrapper[4730]: I0930 10:11:22.224310 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e888152b-fd89-49d3-934a-7586b0fabf19-ovsdbserver-sb\") pod \"dnsmasq-dns-598cd8bf55-7dpb6\" (UID: \"e888152b-fd89-49d3-934a-7586b0fabf19\") " pod="openstack/dnsmasq-dns-598cd8bf55-7dpb6" Sep 30 10:11:22 crc kubenswrapper[4730]: I0930 10:11:22.224376 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bwpxn\" (UniqueName: \"kubernetes.io/projected/e888152b-fd89-49d3-934a-7586b0fabf19-kube-api-access-bwpxn\") pod \"dnsmasq-dns-598cd8bf55-7dpb6\" (UID: \"e888152b-fd89-49d3-934a-7586b0fabf19\") " pod="openstack/dnsmasq-dns-598cd8bf55-7dpb6" Sep 30 10:11:22 crc kubenswrapper[4730]: I0930 10:11:22.224420 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e888152b-fd89-49d3-934a-7586b0fabf19-ovsdbserver-nb\") pod \"dnsmasq-dns-598cd8bf55-7dpb6\" (UID: \"e888152b-fd89-49d3-934a-7586b0fabf19\") " pod="openstack/dnsmasq-dns-598cd8bf55-7dpb6" Sep 30 10:11:22 crc kubenswrapper[4730]: I0930 10:11:22.225098 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e888152b-fd89-49d3-934a-7586b0fabf19-config\") pod \"dnsmasq-dns-598cd8bf55-7dpb6\" (UID: \"e888152b-fd89-49d3-934a-7586b0fabf19\") " pod="openstack/dnsmasq-dns-598cd8bf55-7dpb6" Sep 30 10:11:22 crc kubenswrapper[4730]: I0930 10:11:22.225114 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e888152b-fd89-49d3-934a-7586b0fabf19-dns-svc\") pod \"dnsmasq-dns-598cd8bf55-7dpb6\" (UID: \"e888152b-fd89-49d3-934a-7586b0fabf19\") " pod="openstack/dnsmasq-dns-598cd8bf55-7dpb6" Sep 30 10:11:22 crc kubenswrapper[4730]: I0930 10:11:22.225234 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e888152b-fd89-49d3-934a-7586b0fabf19-ovsdbserver-nb\") pod \"dnsmasq-dns-598cd8bf55-7dpb6\" (UID: \"e888152b-fd89-49d3-934a-7586b0fabf19\") " pod="openstack/dnsmasq-dns-598cd8bf55-7dpb6" Sep 30 10:11:22 crc kubenswrapper[4730]: I0930 10:11:22.225640 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/e888152b-fd89-49d3-934a-7586b0fabf19-openstack-edpm-ipam\") pod \"dnsmasq-dns-598cd8bf55-7dpb6\" (UID: \"e888152b-fd89-49d3-934a-7586b0fabf19\") " pod="openstack/dnsmasq-dns-598cd8bf55-7dpb6" Sep 30 10:11:22 crc kubenswrapper[4730]: I0930 10:11:22.229121 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e888152b-fd89-49d3-934a-7586b0fabf19-ovsdbserver-sb\") pod \"dnsmasq-dns-598cd8bf55-7dpb6\" (UID: \"e888152b-fd89-49d3-934a-7586b0fabf19\") " pod="openstack/dnsmasq-dns-598cd8bf55-7dpb6" Sep 30 10:11:22 crc kubenswrapper[4730]: 
I0930 10:11:22.242862 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bwpxn\" (UniqueName: \"kubernetes.io/projected/e888152b-fd89-49d3-934a-7586b0fabf19-kube-api-access-bwpxn\") pod \"dnsmasq-dns-598cd8bf55-7dpb6\" (UID: \"e888152b-fd89-49d3-934a-7586b0fabf19\") " pod="openstack/dnsmasq-dns-598cd8bf55-7dpb6" Sep 30 10:11:22 crc kubenswrapper[4730]: I0930 10:11:22.438332 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-598cd8bf55-7dpb6" Sep 30 10:11:23 crc kubenswrapper[4730]: I0930 10:11:23.154421 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-598cd8bf55-7dpb6"] Sep 30 10:11:23 crc kubenswrapper[4730]: I0930 10:11:23.306422 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-25nqr"] Sep 30 10:11:23 crc kubenswrapper[4730]: I0930 10:11:23.308095 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-25nqr" Sep 30 10:11:23 crc kubenswrapper[4730]: I0930 10:11:23.310922 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 30 10:11:23 crc kubenswrapper[4730]: I0930 10:11:23.310946 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 10:11:23 crc kubenswrapper[4730]: I0930 10:11:23.311288 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-8vdlt" Sep 30 10:11:23 crc kubenswrapper[4730]: I0930 10:11:23.311333 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 30 10:11:23 crc kubenswrapper[4730]: I0930 10:11:23.319259 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-25nqr"] Sep 30 10:11:23 crc kubenswrapper[4730]: I0930 10:11:23.458211 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-598cd8bf55-7dpb6" event={"ID":"e888152b-fd89-49d3-934a-7586b0fabf19","Type":"ContainerStarted","Data":"b24236b666266eeb7346ec3ce335a540cfc9cb6e8136b30074934710369d115e"} Sep 30 10:11:23 crc kubenswrapper[4730]: I0930 10:11:23.461330 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vq49c\" (UniqueName: \"kubernetes.io/projected/575ec0cb-9c35-4f41-939d-3d80070464f4-kube-api-access-vq49c\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-25nqr\" (UID: \"575ec0cb-9c35-4f41-939d-3d80070464f4\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-25nqr" Sep 30 10:11:23 crc kubenswrapper[4730]: I0930 10:11:23.461524 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/575ec0cb-9c35-4f41-939d-3d80070464f4-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-25nqr\" (UID: \"575ec0cb-9c35-4f41-939d-3d80070464f4\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-25nqr" Sep 30 10:11:23 crc kubenswrapper[4730]: I0930 10:11:23.461713 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/575ec0cb-9c35-4f41-939d-3d80070464f4-repo-setup-combined-ca-bundle\") pod 
\"repo-setup-edpm-deployment-openstack-edpm-ipam-25nqr\" (UID: \"575ec0cb-9c35-4f41-939d-3d80070464f4\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-25nqr" Sep 30 10:11:23 crc kubenswrapper[4730]: I0930 10:11:23.461842 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/575ec0cb-9c35-4f41-939d-3d80070464f4-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-25nqr\" (UID: \"575ec0cb-9c35-4f41-939d-3d80070464f4\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-25nqr" Sep 30 10:11:23 crc kubenswrapper[4730]: I0930 10:11:23.564943 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/575ec0cb-9c35-4f41-939d-3d80070464f4-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-25nqr\" (UID: \"575ec0cb-9c35-4f41-939d-3d80070464f4\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-25nqr" Sep 30 10:11:23 crc kubenswrapper[4730]: I0930 10:11:23.565270 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/575ec0cb-9c35-4f41-939d-3d80070464f4-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-25nqr\" (UID: \"575ec0cb-9c35-4f41-939d-3d80070464f4\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-25nqr" Sep 30 10:11:23 crc kubenswrapper[4730]: I0930 10:11:23.565308 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/575ec0cb-9c35-4f41-939d-3d80070464f4-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-25nqr\" (UID: \"575ec0cb-9c35-4f41-939d-3d80070464f4\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-25nqr" Sep 30 10:11:23 crc kubenswrapper[4730]: I0930 10:11:23.565373 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vq49c\" (UniqueName: \"kubernetes.io/projected/575ec0cb-9c35-4f41-939d-3d80070464f4-kube-api-access-vq49c\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-25nqr\" (UID: \"575ec0cb-9c35-4f41-939d-3d80070464f4\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-25nqr" Sep 30 10:11:23 crc kubenswrapper[4730]: I0930 10:11:23.571129 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/575ec0cb-9c35-4f41-939d-3d80070464f4-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-25nqr\" (UID: \"575ec0cb-9c35-4f41-939d-3d80070464f4\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-25nqr" Sep 30 10:11:23 crc kubenswrapper[4730]: I0930 10:11:23.576276 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/575ec0cb-9c35-4f41-939d-3d80070464f4-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-25nqr\" (UID: \"575ec0cb-9c35-4f41-939d-3d80070464f4\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-25nqr" Sep 30 10:11:23 crc kubenswrapper[4730]: I0930 10:11:23.577136 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/575ec0cb-9c35-4f41-939d-3d80070464f4-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-25nqr\" (UID: 
\"575ec0cb-9c35-4f41-939d-3d80070464f4\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-25nqr" Sep 30 10:11:23 crc kubenswrapper[4730]: I0930 10:11:23.594471 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vq49c\" (UniqueName: \"kubernetes.io/projected/575ec0cb-9c35-4f41-939d-3d80070464f4-kube-api-access-vq49c\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-25nqr\" (UID: \"575ec0cb-9c35-4f41-939d-3d80070464f4\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-25nqr" Sep 30 10:11:23 crc kubenswrapper[4730]: I0930 10:11:23.679407 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-25nqr" Sep 30 10:11:24 crc kubenswrapper[4730]: I0930 10:11:24.195835 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-25nqr"] Sep 30 10:11:24 crc kubenswrapper[4730]: W0930 10:11:24.200522 4730 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod575ec0cb_9c35_4f41_939d_3d80070464f4.slice/crio-117d6c72906d8bfe11737bf55586de075fb13a827e46ed74c700d064e8994048 WatchSource:0}: Error finding container 117d6c72906d8bfe11737bf55586de075fb13a827e46ed74c700d064e8994048: Status 404 returned error can't find the container with id 117d6c72906d8bfe11737bf55586de075fb13a827e46ed74c700d064e8994048 Sep 30 10:11:24 crc kubenswrapper[4730]: I0930 10:11:24.467582 4730 generic.go:334] "Generic (PLEG): container finished" podID="e888152b-fd89-49d3-934a-7586b0fabf19" containerID="a689eda78939a9e26e83604ee763aa90f44f1f184284552b53c614c665ea54b1" exitCode=0 Sep 30 10:11:24 crc kubenswrapper[4730]: I0930 10:11:24.467704 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-598cd8bf55-7dpb6" event={"ID":"e888152b-fd89-49d3-934a-7586b0fabf19","Type":"ContainerDied","Data":"a689eda78939a9e26e83604ee763aa90f44f1f184284552b53c614c665ea54b1"} Sep 30 10:11:24 crc kubenswrapper[4730]: I0930 10:11:24.468961 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-25nqr" event={"ID":"575ec0cb-9c35-4f41-939d-3d80070464f4","Type":"ContainerStarted","Data":"117d6c72906d8bfe11737bf55586de075fb13a827e46ed74c700d064e8994048"} Sep 30 10:11:25 crc kubenswrapper[4730]: I0930 10:11:25.480602 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-598cd8bf55-7dpb6" event={"ID":"e888152b-fd89-49d3-934a-7586b0fabf19","Type":"ContainerStarted","Data":"2e5a066c48939e2a63fbcdf187ddc22bea6805126dde61c5600111bf897ca943"} Sep 30 10:11:25 crc kubenswrapper[4730]: I0930 10:11:25.480820 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-598cd8bf55-7dpb6" Sep 30 10:11:26 crc kubenswrapper[4730]: I0930 10:11:26.414134 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-598cd8bf55-7dpb6" podStartSLOduration=4.414105469 podStartE2EDuration="4.414105469s" podCreationTimestamp="2025-09-30 10:11:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 10:11:25.510019847 +0000 UTC m=+1329.843279850" watchObservedRunningTime="2025-09-30 10:11:26.414105469 +0000 UTC m=+1330.747365502" Sep 30 10:11:32 crc kubenswrapper[4730]: I0930 10:11:32.337061 4730 patch_prober.go:28] 
interesting pod/machine-config-daemon-d4zf9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 10:11:32 crc kubenswrapper[4730]: I0930 10:11:32.337664 4730 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 10:11:32 crc kubenswrapper[4730]: I0930 10:11:32.337739 4730 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" Sep 30 10:11:32 crc kubenswrapper[4730]: I0930 10:11:32.338680 4730 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"d80d6bf84aad0f9e13029ef1a54a6e376ee3848702f4ba4ce0570e2a35ec8e0c"} pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 10:11:32 crc kubenswrapper[4730]: I0930 10:11:32.338756 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerName="machine-config-daemon" containerID="cri-o://d80d6bf84aad0f9e13029ef1a54a6e376ee3848702f4ba4ce0570e2a35ec8e0c" gracePeriod=600 Sep 30 10:11:32 crc kubenswrapper[4730]: I0930 10:11:32.439796 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-598cd8bf55-7dpb6" Sep 30 10:11:32 crc kubenswrapper[4730]: I0930 10:11:32.524460 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5fb4859d7f-dw2rw"] Sep 30 10:11:32 crc kubenswrapper[4730]: I0930 10:11:32.525063 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5fb4859d7f-dw2rw" podUID="7a53a8ac-4c91-46a7-a299-f63ec271774b" containerName="dnsmasq-dns" containerID="cri-o://e68959d125802b7c3ccbadacb9e5bc923ab54b6fb0c03c1e0ed1bacd4e2649a8" gracePeriod=10 Sep 30 10:11:32 crc kubenswrapper[4730]: I0930 10:11:32.558449 4730 generic.go:334] "Generic (PLEG): container finished" podID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerID="d80d6bf84aad0f9e13029ef1a54a6e376ee3848702f4ba4ce0570e2a35ec8e0c" exitCode=0 Sep 30 10:11:32 crc kubenswrapper[4730]: I0930 10:11:32.558757 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" event={"ID":"95bd4436-8399-478d-9552-c9ba5ae8f327","Type":"ContainerDied","Data":"d80d6bf84aad0f9e13029ef1a54a6e376ee3848702f4ba4ce0570e2a35ec8e0c"} Sep 30 10:11:32 crc kubenswrapper[4730]: I0930 10:11:32.558806 4730 scope.go:117] "RemoveContainer" containerID="3472c6d9d1cf6cc70effd10d384c0280a404f7b8fcfc840434d206e9db23adc4" Sep 30 10:11:32 crc kubenswrapper[4730]: I0930 10:11:32.560602 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-25nqr" event={"ID":"575ec0cb-9c35-4f41-939d-3d80070464f4","Type":"ContainerStarted","Data":"b469053f9677578eaa1c30c35d47312338c5905f70d09701dbcfc318e073cbb9"} Sep 30 10:11:32 crc kubenswrapper[4730]: I0930 
10:11:32.585507 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-25nqr" podStartSLOduration=2.055165904 podStartE2EDuration="9.585485348s" podCreationTimestamp="2025-09-30 10:11:23 +0000 UTC" firstStartedPulling="2025-09-30 10:11:24.202547778 +0000 UTC m=+1328.535807771" lastFinishedPulling="2025-09-30 10:11:31.732867222 +0000 UTC m=+1336.066127215" observedRunningTime="2025-09-30 10:11:32.583512566 +0000 UTC m=+1336.916772549" watchObservedRunningTime="2025-09-30 10:11:32.585485348 +0000 UTC m=+1336.918745341" Sep 30 10:11:32 crc kubenswrapper[4730]: I0930 10:11:32.712730 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5c4f8cc8c-r8gbc"] Sep 30 10:11:32 crc kubenswrapper[4730]: I0930 10:11:32.716662 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c4f8cc8c-r8gbc" Sep 30 10:11:32 crc kubenswrapper[4730]: I0930 10:11:32.732918 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5c4f8cc8c-r8gbc"] Sep 30 10:11:32 crc kubenswrapper[4730]: I0930 10:11:32.762512 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t5r7l\" (UniqueName: \"kubernetes.io/projected/7562489e-f18e-470f-a208-8479d49513f9-kube-api-access-t5r7l\") pod \"dnsmasq-dns-5c4f8cc8c-r8gbc\" (UID: \"7562489e-f18e-470f-a208-8479d49513f9\") " pod="openstack/dnsmasq-dns-5c4f8cc8c-r8gbc" Sep 30 10:11:32 crc kubenswrapper[4730]: I0930 10:11:32.762787 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7562489e-f18e-470f-a208-8479d49513f9-dns-svc\") pod \"dnsmasq-dns-5c4f8cc8c-r8gbc\" (UID: \"7562489e-f18e-470f-a208-8479d49513f9\") " pod="openstack/dnsmasq-dns-5c4f8cc8c-r8gbc" Sep 30 10:11:32 crc kubenswrapper[4730]: I0930 10:11:32.762858 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/7562489e-f18e-470f-a208-8479d49513f9-openstack-edpm-ipam\") pod \"dnsmasq-dns-5c4f8cc8c-r8gbc\" (UID: \"7562489e-f18e-470f-a208-8479d49513f9\") " pod="openstack/dnsmasq-dns-5c4f8cc8c-r8gbc" Sep 30 10:11:32 crc kubenswrapper[4730]: I0930 10:11:32.762878 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7562489e-f18e-470f-a208-8479d49513f9-config\") pod \"dnsmasq-dns-5c4f8cc8c-r8gbc\" (UID: \"7562489e-f18e-470f-a208-8479d49513f9\") " pod="openstack/dnsmasq-dns-5c4f8cc8c-r8gbc" Sep 30 10:11:32 crc kubenswrapper[4730]: I0930 10:11:32.762894 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7562489e-f18e-470f-a208-8479d49513f9-ovsdbserver-nb\") pod \"dnsmasq-dns-5c4f8cc8c-r8gbc\" (UID: \"7562489e-f18e-470f-a208-8479d49513f9\") " pod="openstack/dnsmasq-dns-5c4f8cc8c-r8gbc" Sep 30 10:11:32 crc kubenswrapper[4730]: I0930 10:11:32.763020 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7562489e-f18e-470f-a208-8479d49513f9-ovsdbserver-sb\") pod \"dnsmasq-dns-5c4f8cc8c-r8gbc\" (UID: \"7562489e-f18e-470f-a208-8479d49513f9\") " pod="openstack/dnsmasq-dns-5c4f8cc8c-r8gbc" Sep 30 10:11:32 
crc kubenswrapper[4730]: I0930 10:11:32.864425 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/7562489e-f18e-470f-a208-8479d49513f9-openstack-edpm-ipam\") pod \"dnsmasq-dns-5c4f8cc8c-r8gbc\" (UID: \"7562489e-f18e-470f-a208-8479d49513f9\") " pod="openstack/dnsmasq-dns-5c4f8cc8c-r8gbc" Sep 30 10:11:32 crc kubenswrapper[4730]: I0930 10:11:32.864470 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7562489e-f18e-470f-a208-8479d49513f9-config\") pod \"dnsmasq-dns-5c4f8cc8c-r8gbc\" (UID: \"7562489e-f18e-470f-a208-8479d49513f9\") " pod="openstack/dnsmasq-dns-5c4f8cc8c-r8gbc" Sep 30 10:11:32 crc kubenswrapper[4730]: I0930 10:11:32.864724 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7562489e-f18e-470f-a208-8479d49513f9-ovsdbserver-nb\") pod \"dnsmasq-dns-5c4f8cc8c-r8gbc\" (UID: \"7562489e-f18e-470f-a208-8479d49513f9\") " pod="openstack/dnsmasq-dns-5c4f8cc8c-r8gbc" Sep 30 10:11:32 crc kubenswrapper[4730]: I0930 10:11:32.865009 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7562489e-f18e-470f-a208-8479d49513f9-ovsdbserver-sb\") pod \"dnsmasq-dns-5c4f8cc8c-r8gbc\" (UID: \"7562489e-f18e-470f-a208-8479d49513f9\") " pod="openstack/dnsmasq-dns-5c4f8cc8c-r8gbc" Sep 30 10:11:32 crc kubenswrapper[4730]: I0930 10:11:32.865130 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t5r7l\" (UniqueName: \"kubernetes.io/projected/7562489e-f18e-470f-a208-8479d49513f9-kube-api-access-t5r7l\") pod \"dnsmasq-dns-5c4f8cc8c-r8gbc\" (UID: \"7562489e-f18e-470f-a208-8479d49513f9\") " pod="openstack/dnsmasq-dns-5c4f8cc8c-r8gbc" Sep 30 10:11:32 crc kubenswrapper[4730]: I0930 10:11:32.865171 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7562489e-f18e-470f-a208-8479d49513f9-dns-svc\") pod \"dnsmasq-dns-5c4f8cc8c-r8gbc\" (UID: \"7562489e-f18e-470f-a208-8479d49513f9\") " pod="openstack/dnsmasq-dns-5c4f8cc8c-r8gbc" Sep 30 10:11:32 crc kubenswrapper[4730]: I0930 10:11:32.865549 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/7562489e-f18e-470f-a208-8479d49513f9-openstack-edpm-ipam\") pod \"dnsmasq-dns-5c4f8cc8c-r8gbc\" (UID: \"7562489e-f18e-470f-a208-8479d49513f9\") " pod="openstack/dnsmasq-dns-5c4f8cc8c-r8gbc" Sep 30 10:11:32 crc kubenswrapper[4730]: I0930 10:11:32.865662 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7562489e-f18e-470f-a208-8479d49513f9-config\") pod \"dnsmasq-dns-5c4f8cc8c-r8gbc\" (UID: \"7562489e-f18e-470f-a208-8479d49513f9\") " pod="openstack/dnsmasq-dns-5c4f8cc8c-r8gbc" Sep 30 10:11:32 crc kubenswrapper[4730]: I0930 10:11:32.866028 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7562489e-f18e-470f-a208-8479d49513f9-ovsdbserver-nb\") pod \"dnsmasq-dns-5c4f8cc8c-r8gbc\" (UID: \"7562489e-f18e-470f-a208-8479d49513f9\") " pod="openstack/dnsmasq-dns-5c4f8cc8c-r8gbc" Sep 30 10:11:32 crc kubenswrapper[4730]: I0930 10:11:32.866214 4730 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7562489e-f18e-470f-a208-8479d49513f9-ovsdbserver-sb\") pod \"dnsmasq-dns-5c4f8cc8c-r8gbc\" (UID: \"7562489e-f18e-470f-a208-8479d49513f9\") " pod="openstack/dnsmasq-dns-5c4f8cc8c-r8gbc" Sep 30 10:11:32 crc kubenswrapper[4730]: I0930 10:11:32.866588 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7562489e-f18e-470f-a208-8479d49513f9-dns-svc\") pod \"dnsmasq-dns-5c4f8cc8c-r8gbc\" (UID: \"7562489e-f18e-470f-a208-8479d49513f9\") " pod="openstack/dnsmasq-dns-5c4f8cc8c-r8gbc" Sep 30 10:11:32 crc kubenswrapper[4730]: I0930 10:11:32.898096 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t5r7l\" (UniqueName: \"kubernetes.io/projected/7562489e-f18e-470f-a208-8479d49513f9-kube-api-access-t5r7l\") pod \"dnsmasq-dns-5c4f8cc8c-r8gbc\" (UID: \"7562489e-f18e-470f-a208-8479d49513f9\") " pod="openstack/dnsmasq-dns-5c4f8cc8c-r8gbc" Sep 30 10:11:33 crc kubenswrapper[4730]: I0930 10:11:33.099968 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c4f8cc8c-r8gbc" Sep 30 10:11:33 crc kubenswrapper[4730]: I0930 10:11:33.211965 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5fb4859d7f-dw2rw" Sep 30 10:11:33 crc kubenswrapper[4730]: I0930 10:11:33.377567 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7a53a8ac-4c91-46a7-a299-f63ec271774b-dns-svc\") pod \"7a53a8ac-4c91-46a7-a299-f63ec271774b\" (UID: \"7a53a8ac-4c91-46a7-a299-f63ec271774b\") " Sep 30 10:11:33 crc kubenswrapper[4730]: I0930 10:11:33.378006 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7a53a8ac-4c91-46a7-a299-f63ec271774b-config\") pod \"7a53a8ac-4c91-46a7-a299-f63ec271774b\" (UID: \"7a53a8ac-4c91-46a7-a299-f63ec271774b\") " Sep 30 10:11:33 crc kubenswrapper[4730]: I0930 10:11:33.378106 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7a53a8ac-4c91-46a7-a299-f63ec271774b-ovsdbserver-sb\") pod \"7a53a8ac-4c91-46a7-a299-f63ec271774b\" (UID: \"7a53a8ac-4c91-46a7-a299-f63ec271774b\") " Sep 30 10:11:33 crc kubenswrapper[4730]: I0930 10:11:33.378137 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7a53a8ac-4c91-46a7-a299-f63ec271774b-ovsdbserver-nb\") pod \"7a53a8ac-4c91-46a7-a299-f63ec271774b\" (UID: \"7a53a8ac-4c91-46a7-a299-f63ec271774b\") " Sep 30 10:11:33 crc kubenswrapper[4730]: I0930 10:11:33.378171 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k7tfr\" (UniqueName: \"kubernetes.io/projected/7a53a8ac-4c91-46a7-a299-f63ec271774b-kube-api-access-k7tfr\") pod \"7a53a8ac-4c91-46a7-a299-f63ec271774b\" (UID: \"7a53a8ac-4c91-46a7-a299-f63ec271774b\") " Sep 30 10:11:33 crc kubenswrapper[4730]: I0930 10:11:33.396398 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7a53a8ac-4c91-46a7-a299-f63ec271774b-kube-api-access-k7tfr" (OuterVolumeSpecName: "kube-api-access-k7tfr") pod "7a53a8ac-4c91-46a7-a299-f63ec271774b" (UID: "7a53a8ac-4c91-46a7-a299-f63ec271774b"). 
InnerVolumeSpecName "kube-api-access-k7tfr". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:11:33 crc kubenswrapper[4730]: I0930 10:11:33.450572 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7a53a8ac-4c91-46a7-a299-f63ec271774b-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "7a53a8ac-4c91-46a7-a299-f63ec271774b" (UID: "7a53a8ac-4c91-46a7-a299-f63ec271774b"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 10:11:33 crc kubenswrapper[4730]: I0930 10:11:33.457241 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7a53a8ac-4c91-46a7-a299-f63ec271774b-config" (OuterVolumeSpecName: "config") pod "7a53a8ac-4c91-46a7-a299-f63ec271774b" (UID: "7a53a8ac-4c91-46a7-a299-f63ec271774b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 10:11:33 crc kubenswrapper[4730]: I0930 10:11:33.470378 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7a53a8ac-4c91-46a7-a299-f63ec271774b-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "7a53a8ac-4c91-46a7-a299-f63ec271774b" (UID: "7a53a8ac-4c91-46a7-a299-f63ec271774b"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 10:11:33 crc kubenswrapper[4730]: I0930 10:11:33.485643 4730 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7a53a8ac-4c91-46a7-a299-f63ec271774b-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 30 10:11:33 crc kubenswrapper[4730]: I0930 10:11:33.486008 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k7tfr\" (UniqueName: \"kubernetes.io/projected/7a53a8ac-4c91-46a7-a299-f63ec271774b-kube-api-access-k7tfr\") on node \"crc\" DevicePath \"\"" Sep 30 10:11:33 crc kubenswrapper[4730]: I0930 10:11:33.486626 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7a53a8ac-4c91-46a7-a299-f63ec271774b-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "7a53a8ac-4c91-46a7-a299-f63ec271774b" (UID: "7a53a8ac-4c91-46a7-a299-f63ec271774b"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 10:11:33 crc kubenswrapper[4730]: I0930 10:11:33.486782 4730 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7a53a8ac-4c91-46a7-a299-f63ec271774b-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 10:11:33 crc kubenswrapper[4730]: I0930 10:11:33.486809 4730 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7a53a8ac-4c91-46a7-a299-f63ec271774b-config\") on node \"crc\" DevicePath \"\"" Sep 30 10:11:33 crc kubenswrapper[4730]: I0930 10:11:33.574522 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" event={"ID":"95bd4436-8399-478d-9552-c9ba5ae8f327","Type":"ContainerStarted","Data":"62dd4ca15c6a394c053301d28de02310227d72673a1c1548f40c218bf8c90c44"} Sep 30 10:11:33 crc kubenswrapper[4730]: I0930 10:11:33.582095 4730 generic.go:334] "Generic (PLEG): container finished" podID="7a53a8ac-4c91-46a7-a299-f63ec271774b" containerID="e68959d125802b7c3ccbadacb9e5bc923ab54b6fb0c03c1e0ed1bacd4e2649a8" exitCode=0 Sep 30 10:11:33 crc kubenswrapper[4730]: I0930 10:11:33.582223 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5fb4859d7f-dw2rw" Sep 30 10:11:33 crc kubenswrapper[4730]: I0930 10:11:33.582681 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5fb4859d7f-dw2rw" event={"ID":"7a53a8ac-4c91-46a7-a299-f63ec271774b","Type":"ContainerDied","Data":"e68959d125802b7c3ccbadacb9e5bc923ab54b6fb0c03c1e0ed1bacd4e2649a8"} Sep 30 10:11:33 crc kubenswrapper[4730]: I0930 10:11:33.582742 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5fb4859d7f-dw2rw" event={"ID":"7a53a8ac-4c91-46a7-a299-f63ec271774b","Type":"ContainerDied","Data":"501f871db4eb0dbfb4f3efba4ba9bd2499e52940cffba931a420cbffa8fa4b4c"} Sep 30 10:11:33 crc kubenswrapper[4730]: I0930 10:11:33.582766 4730 scope.go:117] "RemoveContainer" containerID="e68959d125802b7c3ccbadacb9e5bc923ab54b6fb0c03c1e0ed1bacd4e2649a8" Sep 30 10:11:33 crc kubenswrapper[4730]: I0930 10:11:33.591381 4730 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7a53a8ac-4c91-46a7-a299-f63ec271774b-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 30 10:11:33 crc kubenswrapper[4730]: I0930 10:11:33.628525 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5c4f8cc8c-r8gbc"] Sep 30 10:11:33 crc kubenswrapper[4730]: I0930 10:11:33.631220 4730 scope.go:117] "RemoveContainer" containerID="b2db242ed5b1d797e7c38a18b0ddf9f7d7aced1909d532c43579d0cda33bc6cf" Sep 30 10:11:33 crc kubenswrapper[4730]: W0930 10:11:33.633891 4730 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7562489e_f18e_470f_a208_8479d49513f9.slice/crio-d48e321d9b4687393f7cea97ed9f46c2f39959c55f048d9decec97a10a658f42 WatchSource:0}: Error finding container d48e321d9b4687393f7cea97ed9f46c2f39959c55f048d9decec97a10a658f42: Status 404 returned error can't find the container with id d48e321d9b4687393f7cea97ed9f46c2f39959c55f048d9decec97a10a658f42 Sep 30 10:11:33 crc kubenswrapper[4730]: I0930 10:11:33.641359 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5fb4859d7f-dw2rw"] Sep 30 10:11:33 crc kubenswrapper[4730]: I0930 10:11:33.651201 4730 kubelet.go:2431] 
"SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5fb4859d7f-dw2rw"] Sep 30 10:11:33 crc kubenswrapper[4730]: I0930 10:11:33.658269 4730 scope.go:117] "RemoveContainer" containerID="e68959d125802b7c3ccbadacb9e5bc923ab54b6fb0c03c1e0ed1bacd4e2649a8" Sep 30 10:11:33 crc kubenswrapper[4730]: E0930 10:11:33.658794 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e68959d125802b7c3ccbadacb9e5bc923ab54b6fb0c03c1e0ed1bacd4e2649a8\": container with ID starting with e68959d125802b7c3ccbadacb9e5bc923ab54b6fb0c03c1e0ed1bacd4e2649a8 not found: ID does not exist" containerID="e68959d125802b7c3ccbadacb9e5bc923ab54b6fb0c03c1e0ed1bacd4e2649a8" Sep 30 10:11:33 crc kubenswrapper[4730]: I0930 10:11:33.658862 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e68959d125802b7c3ccbadacb9e5bc923ab54b6fb0c03c1e0ed1bacd4e2649a8"} err="failed to get container status \"e68959d125802b7c3ccbadacb9e5bc923ab54b6fb0c03c1e0ed1bacd4e2649a8\": rpc error: code = NotFound desc = could not find container \"e68959d125802b7c3ccbadacb9e5bc923ab54b6fb0c03c1e0ed1bacd4e2649a8\": container with ID starting with e68959d125802b7c3ccbadacb9e5bc923ab54b6fb0c03c1e0ed1bacd4e2649a8 not found: ID does not exist" Sep 30 10:11:33 crc kubenswrapper[4730]: I0930 10:11:33.658888 4730 scope.go:117] "RemoveContainer" containerID="b2db242ed5b1d797e7c38a18b0ddf9f7d7aced1909d532c43579d0cda33bc6cf" Sep 30 10:11:33 crc kubenswrapper[4730]: E0930 10:11:33.659232 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b2db242ed5b1d797e7c38a18b0ddf9f7d7aced1909d532c43579d0cda33bc6cf\": container with ID starting with b2db242ed5b1d797e7c38a18b0ddf9f7d7aced1909d532c43579d0cda33bc6cf not found: ID does not exist" containerID="b2db242ed5b1d797e7c38a18b0ddf9f7d7aced1909d532c43579d0cda33bc6cf" Sep 30 10:11:33 crc kubenswrapper[4730]: I0930 10:11:33.659282 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b2db242ed5b1d797e7c38a18b0ddf9f7d7aced1909d532c43579d0cda33bc6cf"} err="failed to get container status \"b2db242ed5b1d797e7c38a18b0ddf9f7d7aced1909d532c43579d0cda33bc6cf\": rpc error: code = NotFound desc = could not find container \"b2db242ed5b1d797e7c38a18b0ddf9f7d7aced1909d532c43579d0cda33bc6cf\": container with ID starting with b2db242ed5b1d797e7c38a18b0ddf9f7d7aced1909d532c43579d0cda33bc6cf not found: ID does not exist" Sep 30 10:11:34 crc kubenswrapper[4730]: I0930 10:11:34.391271 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7a53a8ac-4c91-46a7-a299-f63ec271774b" path="/var/lib/kubelet/pods/7a53a8ac-4c91-46a7-a299-f63ec271774b/volumes" Sep 30 10:11:34 crc kubenswrapper[4730]: I0930 10:11:34.594380 4730 generic.go:334] "Generic (PLEG): container finished" podID="7562489e-f18e-470f-a208-8479d49513f9" containerID="9b2a43c9ca45dd8dd928299da42463d182c71d19f672dfaec7b5c88116af8a81" exitCode=0 Sep 30 10:11:34 crc kubenswrapper[4730]: I0930 10:11:34.594487 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c4f8cc8c-r8gbc" event={"ID":"7562489e-f18e-470f-a208-8479d49513f9","Type":"ContainerDied","Data":"9b2a43c9ca45dd8dd928299da42463d182c71d19f672dfaec7b5c88116af8a81"} Sep 30 10:11:34 crc kubenswrapper[4730]: I0930 10:11:34.594552 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c4f8cc8c-r8gbc" 
event={"ID":"7562489e-f18e-470f-a208-8479d49513f9","Type":"ContainerStarted","Data":"d48e321d9b4687393f7cea97ed9f46c2f39959c55f048d9decec97a10a658f42"} Sep 30 10:11:35 crc kubenswrapper[4730]: I0930 10:11:35.606315 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c4f8cc8c-r8gbc" event={"ID":"7562489e-f18e-470f-a208-8479d49513f9","Type":"ContainerStarted","Data":"23024f526313e5cc84e186d2fd188e2c21d26823d838c529487bdad5e21cf2f4"} Sep 30 10:11:35 crc kubenswrapper[4730]: I0930 10:11:35.607050 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5c4f8cc8c-r8gbc" Sep 30 10:11:35 crc kubenswrapper[4730]: I0930 10:11:35.629008 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5c4f8cc8c-r8gbc" podStartSLOduration=3.628988304 podStartE2EDuration="3.628988304s" podCreationTimestamp="2025-09-30 10:11:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 10:11:35.622079605 +0000 UTC m=+1339.955339628" watchObservedRunningTime="2025-09-30 10:11:35.628988304 +0000 UTC m=+1339.962248297" Sep 30 10:11:43 crc kubenswrapper[4730]: I0930 10:11:43.102165 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5c4f8cc8c-r8gbc" Sep 30 10:11:43 crc kubenswrapper[4730]: I0930 10:11:43.157999 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-598cd8bf55-7dpb6"] Sep 30 10:11:43 crc kubenswrapper[4730]: I0930 10:11:43.158674 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-598cd8bf55-7dpb6" podUID="e888152b-fd89-49d3-934a-7586b0fabf19" containerName="dnsmasq-dns" containerID="cri-o://2e5a066c48939e2a63fbcdf187ddc22bea6805126dde61c5600111bf897ca943" gracePeriod=10 Sep 30 10:11:43 crc kubenswrapper[4730]: I0930 10:11:43.695174 4730 generic.go:334] "Generic (PLEG): container finished" podID="e888152b-fd89-49d3-934a-7586b0fabf19" containerID="2e5a066c48939e2a63fbcdf187ddc22bea6805126dde61c5600111bf897ca943" exitCode=0 Sep 30 10:11:43 crc kubenswrapper[4730]: I0930 10:11:43.695366 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-598cd8bf55-7dpb6" event={"ID":"e888152b-fd89-49d3-934a-7586b0fabf19","Type":"ContainerDied","Data":"2e5a066c48939e2a63fbcdf187ddc22bea6805126dde61c5600111bf897ca943"} Sep 30 10:11:43 crc kubenswrapper[4730]: I0930 10:11:43.695479 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-598cd8bf55-7dpb6" event={"ID":"e888152b-fd89-49d3-934a-7586b0fabf19","Type":"ContainerDied","Data":"b24236b666266eeb7346ec3ce335a540cfc9cb6e8136b30074934710369d115e"} Sep 30 10:11:43 crc kubenswrapper[4730]: I0930 10:11:43.695495 4730 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b24236b666266eeb7346ec3ce335a540cfc9cb6e8136b30074934710369d115e" Sep 30 10:11:43 crc kubenswrapper[4730]: I0930 10:11:43.753535 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-598cd8bf55-7dpb6" Sep 30 10:11:43 crc kubenswrapper[4730]: I0930 10:11:43.879334 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bwpxn\" (UniqueName: \"kubernetes.io/projected/e888152b-fd89-49d3-934a-7586b0fabf19-kube-api-access-bwpxn\") pod \"e888152b-fd89-49d3-934a-7586b0fabf19\" (UID: \"e888152b-fd89-49d3-934a-7586b0fabf19\") " Sep 30 10:11:43 crc kubenswrapper[4730]: I0930 10:11:43.879505 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e888152b-fd89-49d3-934a-7586b0fabf19-ovsdbserver-sb\") pod \"e888152b-fd89-49d3-934a-7586b0fabf19\" (UID: \"e888152b-fd89-49d3-934a-7586b0fabf19\") " Sep 30 10:11:43 crc kubenswrapper[4730]: I0930 10:11:43.879546 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e888152b-fd89-49d3-934a-7586b0fabf19-dns-svc\") pod \"e888152b-fd89-49d3-934a-7586b0fabf19\" (UID: \"e888152b-fd89-49d3-934a-7586b0fabf19\") " Sep 30 10:11:43 crc kubenswrapper[4730]: I0930 10:11:43.879581 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e888152b-fd89-49d3-934a-7586b0fabf19-config\") pod \"e888152b-fd89-49d3-934a-7586b0fabf19\" (UID: \"e888152b-fd89-49d3-934a-7586b0fabf19\") " Sep 30 10:11:43 crc kubenswrapper[4730]: I0930 10:11:43.879645 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e888152b-fd89-49d3-934a-7586b0fabf19-ovsdbserver-nb\") pod \"e888152b-fd89-49d3-934a-7586b0fabf19\" (UID: \"e888152b-fd89-49d3-934a-7586b0fabf19\") " Sep 30 10:11:43 crc kubenswrapper[4730]: I0930 10:11:43.879723 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/e888152b-fd89-49d3-934a-7586b0fabf19-openstack-edpm-ipam\") pod \"e888152b-fd89-49d3-934a-7586b0fabf19\" (UID: \"e888152b-fd89-49d3-934a-7586b0fabf19\") " Sep 30 10:11:43 crc kubenswrapper[4730]: I0930 10:11:43.886901 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e888152b-fd89-49d3-934a-7586b0fabf19-kube-api-access-bwpxn" (OuterVolumeSpecName: "kube-api-access-bwpxn") pod "e888152b-fd89-49d3-934a-7586b0fabf19" (UID: "e888152b-fd89-49d3-934a-7586b0fabf19"). InnerVolumeSpecName "kube-api-access-bwpxn". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:11:43 crc kubenswrapper[4730]: I0930 10:11:43.928298 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e888152b-fd89-49d3-934a-7586b0fabf19-config" (OuterVolumeSpecName: "config") pod "e888152b-fd89-49d3-934a-7586b0fabf19" (UID: "e888152b-fd89-49d3-934a-7586b0fabf19"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 10:11:43 crc kubenswrapper[4730]: I0930 10:11:43.932339 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e888152b-fd89-49d3-934a-7586b0fabf19-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "e888152b-fd89-49d3-934a-7586b0fabf19" (UID: "e888152b-fd89-49d3-934a-7586b0fabf19"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 10:11:43 crc kubenswrapper[4730]: I0930 10:11:43.934399 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e888152b-fd89-49d3-934a-7586b0fabf19-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "e888152b-fd89-49d3-934a-7586b0fabf19" (UID: "e888152b-fd89-49d3-934a-7586b0fabf19"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 10:11:43 crc kubenswrapper[4730]: I0930 10:11:43.948535 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e888152b-fd89-49d3-934a-7586b0fabf19-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "e888152b-fd89-49d3-934a-7586b0fabf19" (UID: "e888152b-fd89-49d3-934a-7586b0fabf19"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 10:11:43 crc kubenswrapper[4730]: I0930 10:11:43.963039 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e888152b-fd89-49d3-934a-7586b0fabf19-openstack-edpm-ipam" (OuterVolumeSpecName: "openstack-edpm-ipam") pod "e888152b-fd89-49d3-934a-7586b0fabf19" (UID: "e888152b-fd89-49d3-934a-7586b0fabf19"). InnerVolumeSpecName "openstack-edpm-ipam". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 10:11:43 crc kubenswrapper[4730]: I0930 10:11:43.982121 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bwpxn\" (UniqueName: \"kubernetes.io/projected/e888152b-fd89-49d3-934a-7586b0fabf19-kube-api-access-bwpxn\") on node \"crc\" DevicePath \"\"" Sep 30 10:11:43 crc kubenswrapper[4730]: I0930 10:11:43.982164 4730 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e888152b-fd89-49d3-934a-7586b0fabf19-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 30 10:11:43 crc kubenswrapper[4730]: I0930 10:11:43.982177 4730 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e888152b-fd89-49d3-934a-7586b0fabf19-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 10:11:43 crc kubenswrapper[4730]: I0930 10:11:43.982188 4730 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e888152b-fd89-49d3-934a-7586b0fabf19-config\") on node \"crc\" DevicePath \"\"" Sep 30 10:11:43 crc kubenswrapper[4730]: I0930 10:11:43.982204 4730 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e888152b-fd89-49d3-934a-7586b0fabf19-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 30 10:11:43 crc kubenswrapper[4730]: I0930 10:11:43.982214 4730 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/e888152b-fd89-49d3-934a-7586b0fabf19-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Sep 30 10:11:44 crc kubenswrapper[4730]: I0930 10:11:44.703421 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-598cd8bf55-7dpb6" Sep 30 10:11:44 crc kubenswrapper[4730]: I0930 10:11:44.740737 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-598cd8bf55-7dpb6"] Sep 30 10:11:44 crc kubenswrapper[4730]: I0930 10:11:44.754709 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-598cd8bf55-7dpb6"] Sep 30 10:11:45 crc kubenswrapper[4730]: I0930 10:11:45.716740 4730 generic.go:334] "Generic (PLEG): container finished" podID="a3b79a67-6ca7-44cd-8108-9afb64437809" containerID="a678affb68479ac2cacb8184e40d3956918760bfa336bc83665f28bfcc2a3d4d" exitCode=0 Sep 30 10:11:45 crc kubenswrapper[4730]: I0930 10:11:45.716813 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"a3b79a67-6ca7-44cd-8108-9afb64437809","Type":"ContainerDied","Data":"a678affb68479ac2cacb8184e40d3956918760bfa336bc83665f28bfcc2a3d4d"} Sep 30 10:11:45 crc kubenswrapper[4730]: I0930 10:11:45.719270 4730 generic.go:334] "Generic (PLEG): container finished" podID="a1a78aec-c35b-41c6-a1e0-43fba77e84fd" containerID="ad6fde4f023a3596826798f14db6bd29f9d21331f98ef9a18564e2d0997ae1dd" exitCode=0 Sep 30 10:11:45 crc kubenswrapper[4730]: I0930 10:11:45.719298 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"a1a78aec-c35b-41c6-a1e0-43fba77e84fd","Type":"ContainerDied","Data":"ad6fde4f023a3596826798f14db6bd29f9d21331f98ef9a18564e2d0997ae1dd"} Sep 30 10:11:46 crc kubenswrapper[4730]: I0930 10:11:46.392860 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e888152b-fd89-49d3-934a-7586b0fabf19" path="/var/lib/kubelet/pods/e888152b-fd89-49d3-934a-7586b0fabf19/volumes" Sep 30 10:11:46 crc kubenswrapper[4730]: I0930 10:11:46.735170 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"a3b79a67-6ca7-44cd-8108-9afb64437809","Type":"ContainerStarted","Data":"fd2164b322924c229516c0829be4686dd347c10d5d58e530799fd928c91caf2e"} Sep 30 10:11:46 crc kubenswrapper[4730]: I0930 10:11:46.735527 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Sep 30 10:11:46 crc kubenswrapper[4730]: I0930 10:11:46.737803 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"a1a78aec-c35b-41c6-a1e0-43fba77e84fd","Type":"ContainerStarted","Data":"05f3a7c3bfde111b6339f5a7ddf3b5e90b3c426ad80991daa879e91e6be7a6ce"} Sep 30 10:11:46 crc kubenswrapper[4730]: I0930 10:11:46.738370 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Sep 30 10:11:46 crc kubenswrapper[4730]: I0930 10:11:46.792232 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=33.792216185 podStartE2EDuration="33.792216185s" podCreationTimestamp="2025-09-30 10:11:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 10:11:46.786879957 +0000 UTC m=+1351.120139950" watchObservedRunningTime="2025-09-30 10:11:46.792216185 +0000 UTC m=+1351.125476178" Sep 30 10:11:46 crc kubenswrapper[4730]: I0930 10:11:46.794251 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=33.794243549 podStartE2EDuration="33.794243549s" 
podCreationTimestamp="2025-09-30 10:11:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 10:11:46.767559862 +0000 UTC m=+1351.100819855" watchObservedRunningTime="2025-09-30 10:11:46.794243549 +0000 UTC m=+1351.127503542" Sep 30 10:11:58 crc kubenswrapper[4730]: I0930 10:11:58.918556 4730 generic.go:334] "Generic (PLEG): container finished" podID="575ec0cb-9c35-4f41-939d-3d80070464f4" containerID="b469053f9677578eaa1c30c35d47312338c5905f70d09701dbcfc318e073cbb9" exitCode=0 Sep 30 10:11:58 crc kubenswrapper[4730]: I0930 10:11:58.918642 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-25nqr" event={"ID":"575ec0cb-9c35-4f41-939d-3d80070464f4","Type":"ContainerDied","Data":"b469053f9677578eaa1c30c35d47312338c5905f70d09701dbcfc318e073cbb9"} Sep 30 10:12:00 crc kubenswrapper[4730]: I0930 10:12:00.427091 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-25nqr" Sep 30 10:12:00 crc kubenswrapper[4730]: I0930 10:12:00.485358 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/575ec0cb-9c35-4f41-939d-3d80070464f4-ssh-key\") pod \"575ec0cb-9c35-4f41-939d-3d80070464f4\" (UID: \"575ec0cb-9c35-4f41-939d-3d80070464f4\") " Sep 30 10:12:00 crc kubenswrapper[4730]: I0930 10:12:00.485512 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/575ec0cb-9c35-4f41-939d-3d80070464f4-inventory\") pod \"575ec0cb-9c35-4f41-939d-3d80070464f4\" (UID: \"575ec0cb-9c35-4f41-939d-3d80070464f4\") " Sep 30 10:12:00 crc kubenswrapper[4730]: I0930 10:12:00.485559 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/575ec0cb-9c35-4f41-939d-3d80070464f4-repo-setup-combined-ca-bundle\") pod \"575ec0cb-9c35-4f41-939d-3d80070464f4\" (UID: \"575ec0cb-9c35-4f41-939d-3d80070464f4\") " Sep 30 10:12:00 crc kubenswrapper[4730]: I0930 10:12:00.485587 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vq49c\" (UniqueName: \"kubernetes.io/projected/575ec0cb-9c35-4f41-939d-3d80070464f4-kube-api-access-vq49c\") pod \"575ec0cb-9c35-4f41-939d-3d80070464f4\" (UID: \"575ec0cb-9c35-4f41-939d-3d80070464f4\") " Sep 30 10:12:00 crc kubenswrapper[4730]: I0930 10:12:00.491673 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/575ec0cb-9c35-4f41-939d-3d80070464f4-kube-api-access-vq49c" (OuterVolumeSpecName: "kube-api-access-vq49c") pod "575ec0cb-9c35-4f41-939d-3d80070464f4" (UID: "575ec0cb-9c35-4f41-939d-3d80070464f4"). InnerVolumeSpecName "kube-api-access-vq49c". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:12:00 crc kubenswrapper[4730]: I0930 10:12:00.491678 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/575ec0cb-9c35-4f41-939d-3d80070464f4-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "575ec0cb-9c35-4f41-939d-3d80070464f4" (UID: "575ec0cb-9c35-4f41-939d-3d80070464f4"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:12:00 crc kubenswrapper[4730]: I0930 10:12:00.512991 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/575ec0cb-9c35-4f41-939d-3d80070464f4-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "575ec0cb-9c35-4f41-939d-3d80070464f4" (UID: "575ec0cb-9c35-4f41-939d-3d80070464f4"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:12:00 crc kubenswrapper[4730]: I0930 10:12:00.519727 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/575ec0cb-9c35-4f41-939d-3d80070464f4-inventory" (OuterVolumeSpecName: "inventory") pod "575ec0cb-9c35-4f41-939d-3d80070464f4" (UID: "575ec0cb-9c35-4f41-939d-3d80070464f4"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:12:00 crc kubenswrapper[4730]: I0930 10:12:00.587784 4730 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/575ec0cb-9c35-4f41-939d-3d80070464f4-inventory\") on node \"crc\" DevicePath \"\"" Sep 30 10:12:00 crc kubenswrapper[4730]: I0930 10:12:00.587830 4730 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/575ec0cb-9c35-4f41-939d-3d80070464f4-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 10:12:00 crc kubenswrapper[4730]: I0930 10:12:00.587846 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vq49c\" (UniqueName: \"kubernetes.io/projected/575ec0cb-9c35-4f41-939d-3d80070464f4-kube-api-access-vq49c\") on node \"crc\" DevicePath \"\"" Sep 30 10:12:00 crc kubenswrapper[4730]: I0930 10:12:00.587867 4730 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/575ec0cb-9c35-4f41-939d-3d80070464f4-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 10:12:00 crc kubenswrapper[4730]: I0930 10:12:00.940733 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-25nqr" event={"ID":"575ec0cb-9c35-4f41-939d-3d80070464f4","Type":"ContainerDied","Data":"117d6c72906d8bfe11737bf55586de075fb13a827e46ed74c700d064e8994048"} Sep 30 10:12:00 crc kubenswrapper[4730]: I0930 10:12:00.941065 4730 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="117d6c72906d8bfe11737bf55586de075fb13a827e46ed74c700d064e8994048" Sep 30 10:12:00 crc kubenswrapper[4730]: I0930 10:12:00.940780 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-25nqr" Sep 30 10:12:01 crc kubenswrapper[4730]: I0930 10:12:01.016658 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-nkxlk"] Sep 30 10:12:01 crc kubenswrapper[4730]: E0930 10:12:01.017094 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7a53a8ac-4c91-46a7-a299-f63ec271774b" containerName="dnsmasq-dns" Sep 30 10:12:01 crc kubenswrapper[4730]: I0930 10:12:01.017115 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="7a53a8ac-4c91-46a7-a299-f63ec271774b" containerName="dnsmasq-dns" Sep 30 10:12:01 crc kubenswrapper[4730]: E0930 10:12:01.017135 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e888152b-fd89-49d3-934a-7586b0fabf19" containerName="init" Sep 30 10:12:01 crc kubenswrapper[4730]: I0930 10:12:01.017143 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="e888152b-fd89-49d3-934a-7586b0fabf19" containerName="init" Sep 30 10:12:01 crc kubenswrapper[4730]: E0930 10:12:01.017160 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7a53a8ac-4c91-46a7-a299-f63ec271774b" containerName="init" Sep 30 10:12:01 crc kubenswrapper[4730]: I0930 10:12:01.017168 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="7a53a8ac-4c91-46a7-a299-f63ec271774b" containerName="init" Sep 30 10:12:01 crc kubenswrapper[4730]: E0930 10:12:01.017185 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="575ec0cb-9c35-4f41-939d-3d80070464f4" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Sep 30 10:12:01 crc kubenswrapper[4730]: I0930 10:12:01.017196 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="575ec0cb-9c35-4f41-939d-3d80070464f4" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Sep 30 10:12:01 crc kubenswrapper[4730]: E0930 10:12:01.017235 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e888152b-fd89-49d3-934a-7586b0fabf19" containerName="dnsmasq-dns" Sep 30 10:12:01 crc kubenswrapper[4730]: I0930 10:12:01.017243 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="e888152b-fd89-49d3-934a-7586b0fabf19" containerName="dnsmasq-dns" Sep 30 10:12:01 crc kubenswrapper[4730]: I0930 10:12:01.017456 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="575ec0cb-9c35-4f41-939d-3d80070464f4" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Sep 30 10:12:01 crc kubenswrapper[4730]: I0930 10:12:01.017475 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="7a53a8ac-4c91-46a7-a299-f63ec271774b" containerName="dnsmasq-dns" Sep 30 10:12:01 crc kubenswrapper[4730]: I0930 10:12:01.017494 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="e888152b-fd89-49d3-934a-7586b0fabf19" containerName="dnsmasq-dns" Sep 30 10:12:01 crc kubenswrapper[4730]: I0930 10:12:01.018170 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-nkxlk" Sep 30 10:12:01 crc kubenswrapper[4730]: I0930 10:12:01.021268 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 30 10:12:01 crc kubenswrapper[4730]: I0930 10:12:01.021440 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 10:12:01 crc kubenswrapper[4730]: I0930 10:12:01.021717 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 30 10:12:01 crc kubenswrapper[4730]: I0930 10:12:01.026116 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-8vdlt" Sep 30 10:12:01 crc kubenswrapper[4730]: I0930 10:12:01.029290 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-nkxlk"] Sep 30 10:12:01 crc kubenswrapper[4730]: I0930 10:12:01.097495 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/94e7b621-95ed-40de-bf21-f3398f10bace-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-nkxlk\" (UID: \"94e7b621-95ed-40de-bf21-f3398f10bace\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-nkxlk" Sep 30 10:12:01 crc kubenswrapper[4730]: I0930 10:12:01.097594 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94e7b621-95ed-40de-bf21-f3398f10bace-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-nkxlk\" (UID: \"94e7b621-95ed-40de-bf21-f3398f10bace\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-nkxlk" Sep 30 10:12:01 crc kubenswrapper[4730]: I0930 10:12:01.097629 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/94e7b621-95ed-40de-bf21-f3398f10bace-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-nkxlk\" (UID: \"94e7b621-95ed-40de-bf21-f3398f10bace\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-nkxlk" Sep 30 10:12:01 crc kubenswrapper[4730]: I0930 10:12:01.097688 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s8cq4\" (UniqueName: \"kubernetes.io/projected/94e7b621-95ed-40de-bf21-f3398f10bace-kube-api-access-s8cq4\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-nkxlk\" (UID: \"94e7b621-95ed-40de-bf21-f3398f10bace\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-nkxlk" Sep 30 10:12:01 crc kubenswrapper[4730]: I0930 10:12:01.199131 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/94e7b621-95ed-40de-bf21-f3398f10bace-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-nkxlk\" (UID: \"94e7b621-95ed-40de-bf21-f3398f10bace\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-nkxlk" Sep 30 10:12:01 crc kubenswrapper[4730]: I0930 10:12:01.199564 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94e7b621-95ed-40de-bf21-f3398f10bace-bootstrap-combined-ca-bundle\") pod 
\"bootstrap-edpm-deployment-openstack-edpm-ipam-nkxlk\" (UID: \"94e7b621-95ed-40de-bf21-f3398f10bace\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-nkxlk" Sep 30 10:12:01 crc kubenswrapper[4730]: I0930 10:12:01.199707 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/94e7b621-95ed-40de-bf21-f3398f10bace-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-nkxlk\" (UID: \"94e7b621-95ed-40de-bf21-f3398f10bace\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-nkxlk" Sep 30 10:12:01 crc kubenswrapper[4730]: I0930 10:12:01.199845 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s8cq4\" (UniqueName: \"kubernetes.io/projected/94e7b621-95ed-40de-bf21-f3398f10bace-kube-api-access-s8cq4\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-nkxlk\" (UID: \"94e7b621-95ed-40de-bf21-f3398f10bace\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-nkxlk" Sep 30 10:12:01 crc kubenswrapper[4730]: I0930 10:12:01.203147 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/94e7b621-95ed-40de-bf21-f3398f10bace-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-nkxlk\" (UID: \"94e7b621-95ed-40de-bf21-f3398f10bace\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-nkxlk" Sep 30 10:12:01 crc kubenswrapper[4730]: I0930 10:12:01.203697 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/94e7b621-95ed-40de-bf21-f3398f10bace-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-nkxlk\" (UID: \"94e7b621-95ed-40de-bf21-f3398f10bace\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-nkxlk" Sep 30 10:12:01 crc kubenswrapper[4730]: I0930 10:12:01.204970 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94e7b621-95ed-40de-bf21-f3398f10bace-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-nkxlk\" (UID: \"94e7b621-95ed-40de-bf21-f3398f10bace\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-nkxlk" Sep 30 10:12:01 crc kubenswrapper[4730]: I0930 10:12:01.217083 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s8cq4\" (UniqueName: \"kubernetes.io/projected/94e7b621-95ed-40de-bf21-f3398f10bace-kube-api-access-s8cq4\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-nkxlk\" (UID: \"94e7b621-95ed-40de-bf21-f3398f10bace\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-nkxlk" Sep 30 10:12:01 crc kubenswrapper[4730]: I0930 10:12:01.339647 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-nkxlk" Sep 30 10:12:01 crc kubenswrapper[4730]: I0930 10:12:01.861035 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-nkxlk"] Sep 30 10:12:01 crc kubenswrapper[4730]: I0930 10:12:01.949347 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-nkxlk" event={"ID":"94e7b621-95ed-40de-bf21-f3398f10bace","Type":"ContainerStarted","Data":"f585faedc3645366ff6e00d59c2e07fcfef7b3e1ab7ecb07cf585b78a64131bb"} Sep 30 10:12:02 crc kubenswrapper[4730]: I0930 10:12:02.962045 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-nkxlk" event={"ID":"94e7b621-95ed-40de-bf21-f3398f10bace","Type":"ContainerStarted","Data":"e07ea6d734f17ae651a76201ba3c6c4296a3176aa665456a828480cc6a95e800"} Sep 30 10:12:02 crc kubenswrapper[4730]: I0930 10:12:02.979258 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-nkxlk" podStartSLOduration=2.574987751 podStartE2EDuration="2.979240483s" podCreationTimestamp="2025-09-30 10:12:00 +0000 UTC" firstStartedPulling="2025-09-30 10:12:01.867059506 +0000 UTC m=+1366.200319499" lastFinishedPulling="2025-09-30 10:12:02.271312238 +0000 UTC m=+1366.604572231" observedRunningTime="2025-09-30 10:12:02.975206988 +0000 UTC m=+1367.308466981" watchObservedRunningTime="2025-09-30 10:12:02.979240483 +0000 UTC m=+1367.312500476" Sep 30 10:12:03 crc kubenswrapper[4730]: I0930 10:12:03.787863 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Sep 30 10:12:03 crc kubenswrapper[4730]: I0930 10:12:03.857808 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Sep 30 10:12:09 crc kubenswrapper[4730]: I0930 10:12:09.971548 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-84vh4"] Sep 30 10:12:09 crc kubenswrapper[4730]: I0930 10:12:09.974866 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-84vh4" Sep 30 10:12:09 crc kubenswrapper[4730]: I0930 10:12:09.981964 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-84vh4"] Sep 30 10:12:10 crc kubenswrapper[4730]: I0930 10:12:10.068148 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9cdd5b15-109a-411a-ac8b-b8daeef28b2e-catalog-content\") pod \"redhat-marketplace-84vh4\" (UID: \"9cdd5b15-109a-411a-ac8b-b8daeef28b2e\") " pod="openshift-marketplace/redhat-marketplace-84vh4" Sep 30 10:12:10 crc kubenswrapper[4730]: I0930 10:12:10.068214 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p8n74\" (UniqueName: \"kubernetes.io/projected/9cdd5b15-109a-411a-ac8b-b8daeef28b2e-kube-api-access-p8n74\") pod \"redhat-marketplace-84vh4\" (UID: \"9cdd5b15-109a-411a-ac8b-b8daeef28b2e\") " pod="openshift-marketplace/redhat-marketplace-84vh4" Sep 30 10:12:10 crc kubenswrapper[4730]: I0930 10:12:10.068452 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9cdd5b15-109a-411a-ac8b-b8daeef28b2e-utilities\") pod \"redhat-marketplace-84vh4\" (UID: \"9cdd5b15-109a-411a-ac8b-b8daeef28b2e\") " pod="openshift-marketplace/redhat-marketplace-84vh4" Sep 30 10:12:10 crc kubenswrapper[4730]: I0930 10:12:10.170485 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9cdd5b15-109a-411a-ac8b-b8daeef28b2e-catalog-content\") pod \"redhat-marketplace-84vh4\" (UID: \"9cdd5b15-109a-411a-ac8b-b8daeef28b2e\") " pod="openshift-marketplace/redhat-marketplace-84vh4" Sep 30 10:12:10 crc kubenswrapper[4730]: I0930 10:12:10.170542 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p8n74\" (UniqueName: \"kubernetes.io/projected/9cdd5b15-109a-411a-ac8b-b8daeef28b2e-kube-api-access-p8n74\") pod \"redhat-marketplace-84vh4\" (UID: \"9cdd5b15-109a-411a-ac8b-b8daeef28b2e\") " pod="openshift-marketplace/redhat-marketplace-84vh4" Sep 30 10:12:10 crc kubenswrapper[4730]: I0930 10:12:10.170590 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9cdd5b15-109a-411a-ac8b-b8daeef28b2e-utilities\") pod \"redhat-marketplace-84vh4\" (UID: \"9cdd5b15-109a-411a-ac8b-b8daeef28b2e\") " pod="openshift-marketplace/redhat-marketplace-84vh4" Sep 30 10:12:10 crc kubenswrapper[4730]: I0930 10:12:10.171202 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9cdd5b15-109a-411a-ac8b-b8daeef28b2e-utilities\") pod \"redhat-marketplace-84vh4\" (UID: \"9cdd5b15-109a-411a-ac8b-b8daeef28b2e\") " pod="openshift-marketplace/redhat-marketplace-84vh4" Sep 30 10:12:10 crc kubenswrapper[4730]: I0930 10:12:10.171481 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9cdd5b15-109a-411a-ac8b-b8daeef28b2e-catalog-content\") pod \"redhat-marketplace-84vh4\" (UID: \"9cdd5b15-109a-411a-ac8b-b8daeef28b2e\") " pod="openshift-marketplace/redhat-marketplace-84vh4" Sep 30 10:12:10 crc kubenswrapper[4730]: I0930 10:12:10.192009 4730 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-p8n74\" (UniqueName: \"kubernetes.io/projected/9cdd5b15-109a-411a-ac8b-b8daeef28b2e-kube-api-access-p8n74\") pod \"redhat-marketplace-84vh4\" (UID: \"9cdd5b15-109a-411a-ac8b-b8daeef28b2e\") " pod="openshift-marketplace/redhat-marketplace-84vh4" Sep 30 10:12:10 crc kubenswrapper[4730]: I0930 10:12:10.310917 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-84vh4" Sep 30 10:12:10 crc kubenswrapper[4730]: I0930 10:12:10.782208 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-84vh4"] Sep 30 10:12:11 crc kubenswrapper[4730]: I0930 10:12:11.033351 4730 generic.go:334] "Generic (PLEG): container finished" podID="9cdd5b15-109a-411a-ac8b-b8daeef28b2e" containerID="da5a1371c3dafaeb976199da9d3b0f22572c47dd0bd80e03cb24af91a9066c68" exitCode=0 Sep 30 10:12:11 crc kubenswrapper[4730]: I0930 10:12:11.033418 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-84vh4" event={"ID":"9cdd5b15-109a-411a-ac8b-b8daeef28b2e","Type":"ContainerDied","Data":"da5a1371c3dafaeb976199da9d3b0f22572c47dd0bd80e03cb24af91a9066c68"} Sep 30 10:12:11 crc kubenswrapper[4730]: I0930 10:12:11.033693 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-84vh4" event={"ID":"9cdd5b15-109a-411a-ac8b-b8daeef28b2e","Type":"ContainerStarted","Data":"cbb425c6c80d6a4dbde1b0ffa11fd78f0f548db65be55048790b710f15fd39e2"} Sep 30 10:12:12 crc kubenswrapper[4730]: I0930 10:12:12.045150 4730 generic.go:334] "Generic (PLEG): container finished" podID="9cdd5b15-109a-411a-ac8b-b8daeef28b2e" containerID="ad800c11adc33f4c8159b735159b08a937f4d1e023bad1a97155613364f61471" exitCode=0 Sep 30 10:12:12 crc kubenswrapper[4730]: I0930 10:12:12.045244 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-84vh4" event={"ID":"9cdd5b15-109a-411a-ac8b-b8daeef28b2e","Type":"ContainerDied","Data":"ad800c11adc33f4c8159b735159b08a937f4d1e023bad1a97155613364f61471"} Sep 30 10:12:13 crc kubenswrapper[4730]: I0930 10:12:13.056510 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-84vh4" event={"ID":"9cdd5b15-109a-411a-ac8b-b8daeef28b2e","Type":"ContainerStarted","Data":"2223b6e65986ca868d23780e610b3f07d4b7cf73be6b0dd123d9de08cfba0b3c"} Sep 30 10:12:13 crc kubenswrapper[4730]: I0930 10:12:13.077574 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-84vh4" podStartSLOduration=2.649251395 podStartE2EDuration="4.077558312s" podCreationTimestamp="2025-09-30 10:12:09 +0000 UTC" firstStartedPulling="2025-09-30 10:12:11.035969361 +0000 UTC m=+1375.369229354" lastFinishedPulling="2025-09-30 10:12:12.464276278 +0000 UTC m=+1376.797536271" observedRunningTime="2025-09-30 10:12:13.072715855 +0000 UTC m=+1377.405975858" watchObservedRunningTime="2025-09-30 10:12:13.077558312 +0000 UTC m=+1377.410818305" Sep 30 10:12:20 crc kubenswrapper[4730]: I0930 10:12:20.311896 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-84vh4" Sep 30 10:12:20 crc kubenswrapper[4730]: I0930 10:12:20.312480 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-84vh4" Sep 30 10:12:20 crc kubenswrapper[4730]: I0930 10:12:20.359604 4730 kubelet.go:2542] "SyncLoop 
(probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-84vh4" Sep 30 10:12:21 crc kubenswrapper[4730]: I0930 10:12:21.184363 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-84vh4" Sep 30 10:12:21 crc kubenswrapper[4730]: I0930 10:12:21.232652 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-84vh4"] Sep 30 10:12:23 crc kubenswrapper[4730]: I0930 10:12:23.159912 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-84vh4" podUID="9cdd5b15-109a-411a-ac8b-b8daeef28b2e" containerName="registry-server" containerID="cri-o://2223b6e65986ca868d23780e610b3f07d4b7cf73be6b0dd123d9de08cfba0b3c" gracePeriod=2 Sep 30 10:12:23 crc kubenswrapper[4730]: I0930 10:12:23.618379 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-84vh4" Sep 30 10:12:23 crc kubenswrapper[4730]: I0930 10:12:23.744734 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9cdd5b15-109a-411a-ac8b-b8daeef28b2e-utilities\") pod \"9cdd5b15-109a-411a-ac8b-b8daeef28b2e\" (UID: \"9cdd5b15-109a-411a-ac8b-b8daeef28b2e\") " Sep 30 10:12:23 crc kubenswrapper[4730]: I0930 10:12:23.744910 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p8n74\" (UniqueName: \"kubernetes.io/projected/9cdd5b15-109a-411a-ac8b-b8daeef28b2e-kube-api-access-p8n74\") pod \"9cdd5b15-109a-411a-ac8b-b8daeef28b2e\" (UID: \"9cdd5b15-109a-411a-ac8b-b8daeef28b2e\") " Sep 30 10:12:23 crc kubenswrapper[4730]: I0930 10:12:23.745168 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9cdd5b15-109a-411a-ac8b-b8daeef28b2e-catalog-content\") pod \"9cdd5b15-109a-411a-ac8b-b8daeef28b2e\" (UID: \"9cdd5b15-109a-411a-ac8b-b8daeef28b2e\") " Sep 30 10:12:23 crc kubenswrapper[4730]: I0930 10:12:23.745995 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9cdd5b15-109a-411a-ac8b-b8daeef28b2e-utilities" (OuterVolumeSpecName: "utilities") pod "9cdd5b15-109a-411a-ac8b-b8daeef28b2e" (UID: "9cdd5b15-109a-411a-ac8b-b8daeef28b2e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 10:12:23 crc kubenswrapper[4730]: I0930 10:12:23.753540 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9cdd5b15-109a-411a-ac8b-b8daeef28b2e-kube-api-access-p8n74" (OuterVolumeSpecName: "kube-api-access-p8n74") pod "9cdd5b15-109a-411a-ac8b-b8daeef28b2e" (UID: "9cdd5b15-109a-411a-ac8b-b8daeef28b2e"). InnerVolumeSpecName "kube-api-access-p8n74". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:12:23 crc kubenswrapper[4730]: I0930 10:12:23.757273 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9cdd5b15-109a-411a-ac8b-b8daeef28b2e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9cdd5b15-109a-411a-ac8b-b8daeef28b2e" (UID: "9cdd5b15-109a-411a-ac8b-b8daeef28b2e"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 10:12:23 crc kubenswrapper[4730]: I0930 10:12:23.847647 4730 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9cdd5b15-109a-411a-ac8b-b8daeef28b2e-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 10:12:23 crc kubenswrapper[4730]: I0930 10:12:23.848281 4730 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9cdd5b15-109a-411a-ac8b-b8daeef28b2e-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 10:12:23 crc kubenswrapper[4730]: I0930 10:12:23.848376 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p8n74\" (UniqueName: \"kubernetes.io/projected/9cdd5b15-109a-411a-ac8b-b8daeef28b2e-kube-api-access-p8n74\") on node \"crc\" DevicePath \"\"" Sep 30 10:12:24 crc kubenswrapper[4730]: I0930 10:12:24.170798 4730 generic.go:334] "Generic (PLEG): container finished" podID="9cdd5b15-109a-411a-ac8b-b8daeef28b2e" containerID="2223b6e65986ca868d23780e610b3f07d4b7cf73be6b0dd123d9de08cfba0b3c" exitCode=0 Sep 30 10:12:24 crc kubenswrapper[4730]: I0930 10:12:24.171042 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-84vh4" event={"ID":"9cdd5b15-109a-411a-ac8b-b8daeef28b2e","Type":"ContainerDied","Data":"2223b6e65986ca868d23780e610b3f07d4b7cf73be6b0dd123d9de08cfba0b3c"} Sep 30 10:12:24 crc kubenswrapper[4730]: I0930 10:12:24.172020 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-84vh4" event={"ID":"9cdd5b15-109a-411a-ac8b-b8daeef28b2e","Type":"ContainerDied","Data":"cbb425c6c80d6a4dbde1b0ffa11fd78f0f548db65be55048790b710f15fd39e2"} Sep 30 10:12:24 crc kubenswrapper[4730]: I0930 10:12:24.171221 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-84vh4" Sep 30 10:12:24 crc kubenswrapper[4730]: I0930 10:12:24.172062 4730 scope.go:117] "RemoveContainer" containerID="2223b6e65986ca868d23780e610b3f07d4b7cf73be6b0dd123d9de08cfba0b3c" Sep 30 10:12:24 crc kubenswrapper[4730]: I0930 10:12:24.196505 4730 scope.go:117] "RemoveContainer" containerID="ad800c11adc33f4c8159b735159b08a937f4d1e023bad1a97155613364f61471" Sep 30 10:12:24 crc kubenswrapper[4730]: I0930 10:12:24.217464 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-84vh4"] Sep 30 10:12:24 crc kubenswrapper[4730]: I0930 10:12:24.231139 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-84vh4"] Sep 30 10:12:24 crc kubenswrapper[4730]: I0930 10:12:24.236117 4730 scope.go:117] "RemoveContainer" containerID="da5a1371c3dafaeb976199da9d3b0f22572c47dd0bd80e03cb24af91a9066c68" Sep 30 10:12:24 crc kubenswrapper[4730]: I0930 10:12:24.284267 4730 scope.go:117] "RemoveContainer" containerID="2223b6e65986ca868d23780e610b3f07d4b7cf73be6b0dd123d9de08cfba0b3c" Sep 30 10:12:24 crc kubenswrapper[4730]: E0930 10:12:24.285229 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2223b6e65986ca868d23780e610b3f07d4b7cf73be6b0dd123d9de08cfba0b3c\": container with ID starting with 2223b6e65986ca868d23780e610b3f07d4b7cf73be6b0dd123d9de08cfba0b3c not found: ID does not exist" containerID="2223b6e65986ca868d23780e610b3f07d4b7cf73be6b0dd123d9de08cfba0b3c" Sep 30 10:12:24 crc kubenswrapper[4730]: I0930 10:12:24.285280 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2223b6e65986ca868d23780e610b3f07d4b7cf73be6b0dd123d9de08cfba0b3c"} err="failed to get container status \"2223b6e65986ca868d23780e610b3f07d4b7cf73be6b0dd123d9de08cfba0b3c\": rpc error: code = NotFound desc = could not find container \"2223b6e65986ca868d23780e610b3f07d4b7cf73be6b0dd123d9de08cfba0b3c\": container with ID starting with 2223b6e65986ca868d23780e610b3f07d4b7cf73be6b0dd123d9de08cfba0b3c not found: ID does not exist" Sep 30 10:12:24 crc kubenswrapper[4730]: I0930 10:12:24.285309 4730 scope.go:117] "RemoveContainer" containerID="ad800c11adc33f4c8159b735159b08a937f4d1e023bad1a97155613364f61471" Sep 30 10:12:24 crc kubenswrapper[4730]: E0930 10:12:24.286971 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ad800c11adc33f4c8159b735159b08a937f4d1e023bad1a97155613364f61471\": container with ID starting with ad800c11adc33f4c8159b735159b08a937f4d1e023bad1a97155613364f61471 not found: ID does not exist" containerID="ad800c11adc33f4c8159b735159b08a937f4d1e023bad1a97155613364f61471" Sep 30 10:12:24 crc kubenswrapper[4730]: I0930 10:12:24.287035 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ad800c11adc33f4c8159b735159b08a937f4d1e023bad1a97155613364f61471"} err="failed to get container status \"ad800c11adc33f4c8159b735159b08a937f4d1e023bad1a97155613364f61471\": rpc error: code = NotFound desc = could not find container \"ad800c11adc33f4c8159b735159b08a937f4d1e023bad1a97155613364f61471\": container with ID starting with ad800c11adc33f4c8159b735159b08a937f4d1e023bad1a97155613364f61471 not found: ID does not exist" Sep 30 10:12:24 crc kubenswrapper[4730]: I0930 10:12:24.287073 4730 scope.go:117] "RemoveContainer" 
containerID="da5a1371c3dafaeb976199da9d3b0f22572c47dd0bd80e03cb24af91a9066c68" Sep 30 10:12:24 crc kubenswrapper[4730]: E0930 10:12:24.288520 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"da5a1371c3dafaeb976199da9d3b0f22572c47dd0bd80e03cb24af91a9066c68\": container with ID starting with da5a1371c3dafaeb976199da9d3b0f22572c47dd0bd80e03cb24af91a9066c68 not found: ID does not exist" containerID="da5a1371c3dafaeb976199da9d3b0f22572c47dd0bd80e03cb24af91a9066c68" Sep 30 10:12:24 crc kubenswrapper[4730]: I0930 10:12:24.288547 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"da5a1371c3dafaeb976199da9d3b0f22572c47dd0bd80e03cb24af91a9066c68"} err="failed to get container status \"da5a1371c3dafaeb976199da9d3b0f22572c47dd0bd80e03cb24af91a9066c68\": rpc error: code = NotFound desc = could not find container \"da5a1371c3dafaeb976199da9d3b0f22572c47dd0bd80e03cb24af91a9066c68\": container with ID starting with da5a1371c3dafaeb976199da9d3b0f22572c47dd0bd80e03cb24af91a9066c68 not found: ID does not exist" Sep 30 10:12:24 crc kubenswrapper[4730]: I0930 10:12:24.391843 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9cdd5b15-109a-411a-ac8b-b8daeef28b2e" path="/var/lib/kubelet/pods/9cdd5b15-109a-411a-ac8b-b8daeef28b2e/volumes" Sep 30 10:12:54 crc kubenswrapper[4730]: I0930 10:12:54.069233 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-p7q4l"] Sep 30 10:12:54 crc kubenswrapper[4730]: E0930 10:12:54.070304 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9cdd5b15-109a-411a-ac8b-b8daeef28b2e" containerName="extract-utilities" Sep 30 10:12:54 crc kubenswrapper[4730]: I0930 10:12:54.070322 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="9cdd5b15-109a-411a-ac8b-b8daeef28b2e" containerName="extract-utilities" Sep 30 10:12:54 crc kubenswrapper[4730]: E0930 10:12:54.070351 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9cdd5b15-109a-411a-ac8b-b8daeef28b2e" containerName="extract-content" Sep 30 10:12:54 crc kubenswrapper[4730]: I0930 10:12:54.070359 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="9cdd5b15-109a-411a-ac8b-b8daeef28b2e" containerName="extract-content" Sep 30 10:12:54 crc kubenswrapper[4730]: E0930 10:12:54.070386 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9cdd5b15-109a-411a-ac8b-b8daeef28b2e" containerName="registry-server" Sep 30 10:12:54 crc kubenswrapper[4730]: I0930 10:12:54.070393 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="9cdd5b15-109a-411a-ac8b-b8daeef28b2e" containerName="registry-server" Sep 30 10:12:54 crc kubenswrapper[4730]: I0930 10:12:54.070603 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="9cdd5b15-109a-411a-ac8b-b8daeef28b2e" containerName="registry-server" Sep 30 10:12:54 crc kubenswrapper[4730]: I0930 10:12:54.072195 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-p7q4l" Sep 30 10:12:54 crc kubenswrapper[4730]: I0930 10:12:54.079135 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-p7q4l"] Sep 30 10:12:54 crc kubenswrapper[4730]: I0930 10:12:54.129835 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c6275d8b-919b-4d29-944b-f945f16f357d-utilities\") pod \"community-operators-p7q4l\" (UID: \"c6275d8b-919b-4d29-944b-f945f16f357d\") " pod="openshift-marketplace/community-operators-p7q4l" Sep 30 10:12:54 crc kubenswrapper[4730]: I0930 10:12:54.129903 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c6275d8b-919b-4d29-944b-f945f16f357d-catalog-content\") pod \"community-operators-p7q4l\" (UID: \"c6275d8b-919b-4d29-944b-f945f16f357d\") " pod="openshift-marketplace/community-operators-p7q4l" Sep 30 10:12:54 crc kubenswrapper[4730]: I0930 10:12:54.130200 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z7jmj\" (UniqueName: \"kubernetes.io/projected/c6275d8b-919b-4d29-944b-f945f16f357d-kube-api-access-z7jmj\") pod \"community-operators-p7q4l\" (UID: \"c6275d8b-919b-4d29-944b-f945f16f357d\") " pod="openshift-marketplace/community-operators-p7q4l" Sep 30 10:12:54 crc kubenswrapper[4730]: I0930 10:12:54.232522 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c6275d8b-919b-4d29-944b-f945f16f357d-utilities\") pod \"community-operators-p7q4l\" (UID: \"c6275d8b-919b-4d29-944b-f945f16f357d\") " pod="openshift-marketplace/community-operators-p7q4l" Sep 30 10:12:54 crc kubenswrapper[4730]: I0930 10:12:54.233112 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c6275d8b-919b-4d29-944b-f945f16f357d-catalog-content\") pod \"community-operators-p7q4l\" (UID: \"c6275d8b-919b-4d29-944b-f945f16f357d\") " pod="openshift-marketplace/community-operators-p7q4l" Sep 30 10:12:54 crc kubenswrapper[4730]: I0930 10:12:54.233052 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c6275d8b-919b-4d29-944b-f945f16f357d-utilities\") pod \"community-operators-p7q4l\" (UID: \"c6275d8b-919b-4d29-944b-f945f16f357d\") " pod="openshift-marketplace/community-operators-p7q4l" Sep 30 10:12:54 crc kubenswrapper[4730]: I0930 10:12:54.233337 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c6275d8b-919b-4d29-944b-f945f16f357d-catalog-content\") pod \"community-operators-p7q4l\" (UID: \"c6275d8b-919b-4d29-944b-f945f16f357d\") " pod="openshift-marketplace/community-operators-p7q4l" Sep 30 10:12:54 crc kubenswrapper[4730]: I0930 10:12:54.233495 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z7jmj\" (UniqueName: \"kubernetes.io/projected/c6275d8b-919b-4d29-944b-f945f16f357d-kube-api-access-z7jmj\") pod \"community-operators-p7q4l\" (UID: \"c6275d8b-919b-4d29-944b-f945f16f357d\") " pod="openshift-marketplace/community-operators-p7q4l" Sep 30 10:12:54 crc kubenswrapper[4730]: I0930 10:12:54.257341 4730 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-z7jmj\" (UniqueName: \"kubernetes.io/projected/c6275d8b-919b-4d29-944b-f945f16f357d-kube-api-access-z7jmj\") pod \"community-operators-p7q4l\" (UID: \"c6275d8b-919b-4d29-944b-f945f16f357d\") " pod="openshift-marketplace/community-operators-p7q4l" Sep 30 10:12:54 crc kubenswrapper[4730]: I0930 10:12:54.432079 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-p7q4l" Sep 30 10:12:54 crc kubenswrapper[4730]: I0930 10:12:54.974167 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-p7q4l"] Sep 30 10:12:55 crc kubenswrapper[4730]: I0930 10:12:55.496834 4730 generic.go:334] "Generic (PLEG): container finished" podID="c6275d8b-919b-4d29-944b-f945f16f357d" containerID="9b1b09bfee4c9dff3a82b96d4b2daea08da4196c9af9929ac36870e2c86beb0a" exitCode=0 Sep 30 10:12:55 crc kubenswrapper[4730]: I0930 10:12:55.496913 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-p7q4l" event={"ID":"c6275d8b-919b-4d29-944b-f945f16f357d","Type":"ContainerDied","Data":"9b1b09bfee4c9dff3a82b96d4b2daea08da4196c9af9929ac36870e2c86beb0a"} Sep 30 10:12:55 crc kubenswrapper[4730]: I0930 10:12:55.497186 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-p7q4l" event={"ID":"c6275d8b-919b-4d29-944b-f945f16f357d","Type":"ContainerStarted","Data":"90470894a09b122fb4ff3f47da65481357e8f7a521667f17f4c0bb49561d2981"} Sep 30 10:12:58 crc kubenswrapper[4730]: I0930 10:12:58.524790 4730 generic.go:334] "Generic (PLEG): container finished" podID="c6275d8b-919b-4d29-944b-f945f16f357d" containerID="3dbe0c9bc39b767f04679cf3d171ee88efa4bfe0713fc5753ef8cdba8f693b09" exitCode=0 Sep 30 10:12:58 crc kubenswrapper[4730]: I0930 10:12:58.524865 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-p7q4l" event={"ID":"c6275d8b-919b-4d29-944b-f945f16f357d","Type":"ContainerDied","Data":"3dbe0c9bc39b767f04679cf3d171ee88efa4bfe0713fc5753ef8cdba8f693b09"} Sep 30 10:13:02 crc kubenswrapper[4730]: I0930 10:13:02.564222 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-p7q4l" event={"ID":"c6275d8b-919b-4d29-944b-f945f16f357d","Type":"ContainerStarted","Data":"2d4e51158be2b5f92a79b4e84bd74daa75acc55b400571b511744c0d41ce3479"} Sep 30 10:13:02 crc kubenswrapper[4730]: I0930 10:13:02.587155 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-p7q4l" podStartSLOduration=1.768301984 podStartE2EDuration="8.587132921s" podCreationTimestamp="2025-09-30 10:12:54 +0000 UTC" firstStartedPulling="2025-09-30 10:12:55.49996248 +0000 UTC m=+1419.833222483" lastFinishedPulling="2025-09-30 10:13:02.318793427 +0000 UTC m=+1426.652053420" observedRunningTime="2025-09-30 10:13:02.583797851 +0000 UTC m=+1426.917057854" watchObservedRunningTime="2025-09-30 10:13:02.587132921 +0000 UTC m=+1426.920392914" Sep 30 10:13:04 crc kubenswrapper[4730]: I0930 10:13:04.432338 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-p7q4l" Sep 30 10:13:04 crc kubenswrapper[4730]: I0930 10:13:04.432666 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-p7q4l" Sep 30 10:13:04 crc kubenswrapper[4730]: I0930 10:13:04.475964 4730 
kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-p7q4l" Sep 30 10:13:14 crc kubenswrapper[4730]: I0930 10:13:14.482219 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-p7q4l" Sep 30 10:13:14 crc kubenswrapper[4730]: I0930 10:13:14.530244 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-p7q4l"] Sep 30 10:13:14 crc kubenswrapper[4730]: I0930 10:13:14.672017 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-p7q4l" podUID="c6275d8b-919b-4d29-944b-f945f16f357d" containerName="registry-server" containerID="cri-o://2d4e51158be2b5f92a79b4e84bd74daa75acc55b400571b511744c0d41ce3479" gracePeriod=2 Sep 30 10:13:15 crc kubenswrapper[4730]: I0930 10:13:15.108923 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-p7q4l" Sep 30 10:13:15 crc kubenswrapper[4730]: I0930 10:13:15.173935 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c6275d8b-919b-4d29-944b-f945f16f357d-utilities\") pod \"c6275d8b-919b-4d29-944b-f945f16f357d\" (UID: \"c6275d8b-919b-4d29-944b-f945f16f357d\") " Sep 30 10:13:15 crc kubenswrapper[4730]: I0930 10:13:15.174344 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c6275d8b-919b-4d29-944b-f945f16f357d-catalog-content\") pod \"c6275d8b-919b-4d29-944b-f945f16f357d\" (UID: \"c6275d8b-919b-4d29-944b-f945f16f357d\") " Sep 30 10:13:15 crc kubenswrapper[4730]: I0930 10:13:15.174522 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z7jmj\" (UniqueName: \"kubernetes.io/projected/c6275d8b-919b-4d29-944b-f945f16f357d-kube-api-access-z7jmj\") pod \"c6275d8b-919b-4d29-944b-f945f16f357d\" (UID: \"c6275d8b-919b-4d29-944b-f945f16f357d\") " Sep 30 10:13:15 crc kubenswrapper[4730]: I0930 10:13:15.177849 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c6275d8b-919b-4d29-944b-f945f16f357d-utilities" (OuterVolumeSpecName: "utilities") pod "c6275d8b-919b-4d29-944b-f945f16f357d" (UID: "c6275d8b-919b-4d29-944b-f945f16f357d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 10:13:15 crc kubenswrapper[4730]: I0930 10:13:15.182466 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c6275d8b-919b-4d29-944b-f945f16f357d-kube-api-access-z7jmj" (OuterVolumeSpecName: "kube-api-access-z7jmj") pod "c6275d8b-919b-4d29-944b-f945f16f357d" (UID: "c6275d8b-919b-4d29-944b-f945f16f357d"). InnerVolumeSpecName "kube-api-access-z7jmj". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:13:15 crc kubenswrapper[4730]: I0930 10:13:15.229700 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c6275d8b-919b-4d29-944b-f945f16f357d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c6275d8b-919b-4d29-944b-f945f16f357d" (UID: "c6275d8b-919b-4d29-944b-f945f16f357d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 10:13:15 crc kubenswrapper[4730]: I0930 10:13:15.276756 4730 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c6275d8b-919b-4d29-944b-f945f16f357d-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 10:13:15 crc kubenswrapper[4730]: I0930 10:13:15.276792 4730 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c6275d8b-919b-4d29-944b-f945f16f357d-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 10:13:15 crc kubenswrapper[4730]: I0930 10:13:15.276805 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z7jmj\" (UniqueName: \"kubernetes.io/projected/c6275d8b-919b-4d29-944b-f945f16f357d-kube-api-access-z7jmj\") on node \"crc\" DevicePath \"\"" Sep 30 10:13:15 crc kubenswrapper[4730]: I0930 10:13:15.685563 4730 generic.go:334] "Generic (PLEG): container finished" podID="c6275d8b-919b-4d29-944b-f945f16f357d" containerID="2d4e51158be2b5f92a79b4e84bd74daa75acc55b400571b511744c0d41ce3479" exitCode=0 Sep 30 10:13:15 crc kubenswrapper[4730]: I0930 10:13:15.685636 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-p7q4l" event={"ID":"c6275d8b-919b-4d29-944b-f945f16f357d","Type":"ContainerDied","Data":"2d4e51158be2b5f92a79b4e84bd74daa75acc55b400571b511744c0d41ce3479"} Sep 30 10:13:15 crc kubenswrapper[4730]: I0930 10:13:15.685665 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-p7q4l" event={"ID":"c6275d8b-919b-4d29-944b-f945f16f357d","Type":"ContainerDied","Data":"90470894a09b122fb4ff3f47da65481357e8f7a521667f17f4c0bb49561d2981"} Sep 30 10:13:15 crc kubenswrapper[4730]: I0930 10:13:15.685681 4730 scope.go:117] "RemoveContainer" containerID="2d4e51158be2b5f92a79b4e84bd74daa75acc55b400571b511744c0d41ce3479" Sep 30 10:13:15 crc kubenswrapper[4730]: I0930 10:13:15.685803 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-p7q4l" Sep 30 10:13:15 crc kubenswrapper[4730]: I0930 10:13:15.727669 4730 scope.go:117] "RemoveContainer" containerID="3dbe0c9bc39b767f04679cf3d171ee88efa4bfe0713fc5753ef8cdba8f693b09" Sep 30 10:13:15 crc kubenswrapper[4730]: I0930 10:13:15.734054 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-p7q4l"] Sep 30 10:13:15 crc kubenswrapper[4730]: I0930 10:13:15.744597 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-p7q4l"] Sep 30 10:13:15 crc kubenswrapper[4730]: I0930 10:13:15.753252 4730 scope.go:117] "RemoveContainer" containerID="9b1b09bfee4c9dff3a82b96d4b2daea08da4196c9af9929ac36870e2c86beb0a" Sep 30 10:13:15 crc kubenswrapper[4730]: I0930 10:13:15.803756 4730 scope.go:117] "RemoveContainer" containerID="2d4e51158be2b5f92a79b4e84bd74daa75acc55b400571b511744c0d41ce3479" Sep 30 10:13:15 crc kubenswrapper[4730]: E0930 10:13:15.804279 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2d4e51158be2b5f92a79b4e84bd74daa75acc55b400571b511744c0d41ce3479\": container with ID starting with 2d4e51158be2b5f92a79b4e84bd74daa75acc55b400571b511744c0d41ce3479 not found: ID does not exist" containerID="2d4e51158be2b5f92a79b4e84bd74daa75acc55b400571b511744c0d41ce3479" Sep 30 10:13:15 crc kubenswrapper[4730]: I0930 10:13:15.804329 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2d4e51158be2b5f92a79b4e84bd74daa75acc55b400571b511744c0d41ce3479"} err="failed to get container status \"2d4e51158be2b5f92a79b4e84bd74daa75acc55b400571b511744c0d41ce3479\": rpc error: code = NotFound desc = could not find container \"2d4e51158be2b5f92a79b4e84bd74daa75acc55b400571b511744c0d41ce3479\": container with ID starting with 2d4e51158be2b5f92a79b4e84bd74daa75acc55b400571b511744c0d41ce3479 not found: ID does not exist" Sep 30 10:13:15 crc kubenswrapper[4730]: I0930 10:13:15.804357 4730 scope.go:117] "RemoveContainer" containerID="3dbe0c9bc39b767f04679cf3d171ee88efa4bfe0713fc5753ef8cdba8f693b09" Sep 30 10:13:15 crc kubenswrapper[4730]: E0930 10:13:15.805028 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3dbe0c9bc39b767f04679cf3d171ee88efa4bfe0713fc5753ef8cdba8f693b09\": container with ID starting with 3dbe0c9bc39b767f04679cf3d171ee88efa4bfe0713fc5753ef8cdba8f693b09 not found: ID does not exist" containerID="3dbe0c9bc39b767f04679cf3d171ee88efa4bfe0713fc5753ef8cdba8f693b09" Sep 30 10:13:15 crc kubenswrapper[4730]: I0930 10:13:15.805058 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3dbe0c9bc39b767f04679cf3d171ee88efa4bfe0713fc5753ef8cdba8f693b09"} err="failed to get container status \"3dbe0c9bc39b767f04679cf3d171ee88efa4bfe0713fc5753ef8cdba8f693b09\": rpc error: code = NotFound desc = could not find container \"3dbe0c9bc39b767f04679cf3d171ee88efa4bfe0713fc5753ef8cdba8f693b09\": container with ID starting with 3dbe0c9bc39b767f04679cf3d171ee88efa4bfe0713fc5753ef8cdba8f693b09 not found: ID does not exist" Sep 30 10:13:15 crc kubenswrapper[4730]: I0930 10:13:15.805074 4730 scope.go:117] "RemoveContainer" containerID="9b1b09bfee4c9dff3a82b96d4b2daea08da4196c9af9929ac36870e2c86beb0a" Sep 30 10:13:15 crc kubenswrapper[4730]: E0930 10:13:15.805736 4730 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"9b1b09bfee4c9dff3a82b96d4b2daea08da4196c9af9929ac36870e2c86beb0a\": container with ID starting with 9b1b09bfee4c9dff3a82b96d4b2daea08da4196c9af9929ac36870e2c86beb0a not found: ID does not exist" containerID="9b1b09bfee4c9dff3a82b96d4b2daea08da4196c9af9929ac36870e2c86beb0a" Sep 30 10:13:15 crc kubenswrapper[4730]: I0930 10:13:15.805761 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9b1b09bfee4c9dff3a82b96d4b2daea08da4196c9af9929ac36870e2c86beb0a"} err="failed to get container status \"9b1b09bfee4c9dff3a82b96d4b2daea08da4196c9af9929ac36870e2c86beb0a\": rpc error: code = NotFound desc = could not find container \"9b1b09bfee4c9dff3a82b96d4b2daea08da4196c9af9929ac36870e2c86beb0a\": container with ID starting with 9b1b09bfee4c9dff3a82b96d4b2daea08da4196c9af9929ac36870e2c86beb0a not found: ID does not exist" Sep 30 10:13:16 crc kubenswrapper[4730]: I0930 10:13:16.391228 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c6275d8b-919b-4d29-944b-f945f16f357d" path="/var/lib/kubelet/pods/c6275d8b-919b-4d29-944b-f945f16f357d/volumes" Sep 30 10:13:21 crc kubenswrapper[4730]: I0930 10:13:21.013816 4730 scope.go:117] "RemoveContainer" containerID="5dc6298b7f974539f124328135a3b32888083616c136c7cd162d72c85f31b1c9" Sep 30 10:13:21 crc kubenswrapper[4730]: I0930 10:13:21.046062 4730 scope.go:117] "RemoveContainer" containerID="3851eb4410c5f00bd60ff66065928ebd4542b4e668df64c96b4a174a4c77daf0" Sep 30 10:13:21 crc kubenswrapper[4730]: I0930 10:13:21.100333 4730 scope.go:117] "RemoveContainer" containerID="e68e1c63f375fee1c775a9cd351f8f67166a04f049890483cfa34efa17456e74" Sep 30 10:13:21 crc kubenswrapper[4730]: I0930 10:13:21.143774 4730 scope.go:117] "RemoveContainer" containerID="5d878091fad4cae748fb5870a38179b6869c36256849c1d8c0d335fdf3097d1e" Sep 30 10:13:21 crc kubenswrapper[4730]: I0930 10:13:21.173776 4730 scope.go:117] "RemoveContainer" containerID="0ffce5943abd58bf6d06872c1b8f6ed80c67947ad6ee098348b4d7ef2101da90" Sep 30 10:13:32 crc kubenswrapper[4730]: I0930 10:13:32.336554 4730 patch_prober.go:28] interesting pod/machine-config-daemon-d4zf9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 10:13:32 crc kubenswrapper[4730]: I0930 10:13:32.337199 4730 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 10:13:54 crc kubenswrapper[4730]: I0930 10:13:54.973893 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-xngsm"] Sep 30 10:13:54 crc kubenswrapper[4730]: E0930 10:13:54.976081 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c6275d8b-919b-4d29-944b-f945f16f357d" containerName="extract-utilities" Sep 30 10:13:54 crc kubenswrapper[4730]: I0930 10:13:54.976198 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="c6275d8b-919b-4d29-944b-f945f16f357d" containerName="extract-utilities" Sep 30 10:13:54 crc kubenswrapper[4730]: E0930 10:13:54.976286 4730 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="c6275d8b-919b-4d29-944b-f945f16f357d" containerName="registry-server" Sep 30 10:13:54 crc kubenswrapper[4730]: I0930 10:13:54.976363 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="c6275d8b-919b-4d29-944b-f945f16f357d" containerName="registry-server" Sep 30 10:13:54 crc kubenswrapper[4730]: E0930 10:13:54.976451 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c6275d8b-919b-4d29-944b-f945f16f357d" containerName="extract-content" Sep 30 10:13:54 crc kubenswrapper[4730]: I0930 10:13:54.976530 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="c6275d8b-919b-4d29-944b-f945f16f357d" containerName="extract-content" Sep 30 10:13:54 crc kubenswrapper[4730]: I0930 10:13:54.976845 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="c6275d8b-919b-4d29-944b-f945f16f357d" containerName="registry-server" Sep 30 10:13:54 crc kubenswrapper[4730]: I0930 10:13:54.978874 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-xngsm" Sep 30 10:13:54 crc kubenswrapper[4730]: I0930 10:13:54.995078 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-xngsm"] Sep 30 10:13:55 crc kubenswrapper[4730]: I0930 10:13:55.170039 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rfwdb\" (UniqueName: \"kubernetes.io/projected/cce40d5f-6049-4330-84ed-6ae64304f30f-kube-api-access-rfwdb\") pod \"certified-operators-xngsm\" (UID: \"cce40d5f-6049-4330-84ed-6ae64304f30f\") " pod="openshift-marketplace/certified-operators-xngsm" Sep 30 10:13:55 crc kubenswrapper[4730]: I0930 10:13:55.170164 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cce40d5f-6049-4330-84ed-6ae64304f30f-catalog-content\") pod \"certified-operators-xngsm\" (UID: \"cce40d5f-6049-4330-84ed-6ae64304f30f\") " pod="openshift-marketplace/certified-operators-xngsm" Sep 30 10:13:55 crc kubenswrapper[4730]: I0930 10:13:55.170239 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cce40d5f-6049-4330-84ed-6ae64304f30f-utilities\") pod \"certified-operators-xngsm\" (UID: \"cce40d5f-6049-4330-84ed-6ae64304f30f\") " pod="openshift-marketplace/certified-operators-xngsm" Sep 30 10:13:55 crc kubenswrapper[4730]: I0930 10:13:55.271847 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cce40d5f-6049-4330-84ed-6ae64304f30f-utilities\") pod \"certified-operators-xngsm\" (UID: \"cce40d5f-6049-4330-84ed-6ae64304f30f\") " pod="openshift-marketplace/certified-operators-xngsm" Sep 30 10:13:55 crc kubenswrapper[4730]: I0930 10:13:55.272380 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cce40d5f-6049-4330-84ed-6ae64304f30f-utilities\") pod \"certified-operators-xngsm\" (UID: \"cce40d5f-6049-4330-84ed-6ae64304f30f\") " pod="openshift-marketplace/certified-operators-xngsm" Sep 30 10:13:55 crc kubenswrapper[4730]: I0930 10:13:55.272518 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rfwdb\" (UniqueName: \"kubernetes.io/projected/cce40d5f-6049-4330-84ed-6ae64304f30f-kube-api-access-rfwdb\") pod \"certified-operators-xngsm\" 
(UID: \"cce40d5f-6049-4330-84ed-6ae64304f30f\") " pod="openshift-marketplace/certified-operators-xngsm" Sep 30 10:13:55 crc kubenswrapper[4730]: I0930 10:13:55.272671 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cce40d5f-6049-4330-84ed-6ae64304f30f-catalog-content\") pod \"certified-operators-xngsm\" (UID: \"cce40d5f-6049-4330-84ed-6ae64304f30f\") " pod="openshift-marketplace/certified-operators-xngsm" Sep 30 10:13:55 crc kubenswrapper[4730]: I0930 10:13:55.273166 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cce40d5f-6049-4330-84ed-6ae64304f30f-catalog-content\") pod \"certified-operators-xngsm\" (UID: \"cce40d5f-6049-4330-84ed-6ae64304f30f\") " pod="openshift-marketplace/certified-operators-xngsm" Sep 30 10:13:55 crc kubenswrapper[4730]: I0930 10:13:55.309445 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rfwdb\" (UniqueName: \"kubernetes.io/projected/cce40d5f-6049-4330-84ed-6ae64304f30f-kube-api-access-rfwdb\") pod \"certified-operators-xngsm\" (UID: \"cce40d5f-6049-4330-84ed-6ae64304f30f\") " pod="openshift-marketplace/certified-operators-xngsm" Sep 30 10:13:55 crc kubenswrapper[4730]: I0930 10:13:55.602839 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-xngsm" Sep 30 10:13:56 crc kubenswrapper[4730]: I0930 10:13:56.019941 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-xngsm"] Sep 30 10:13:56 crc kubenswrapper[4730]: I0930 10:13:56.130666 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xngsm" event={"ID":"cce40d5f-6049-4330-84ed-6ae64304f30f","Type":"ContainerStarted","Data":"86c6af59a78729a88c172bb42e55efed2d2e24ee15a36a45820546cfc89db981"} Sep 30 10:13:57 crc kubenswrapper[4730]: I0930 10:13:57.143233 4730 generic.go:334] "Generic (PLEG): container finished" podID="cce40d5f-6049-4330-84ed-6ae64304f30f" containerID="180b4820c4b33dcab4b45ed8b6e4de9737bb91d54f028898a3429b854ff9b365" exitCode=0 Sep 30 10:13:57 crc kubenswrapper[4730]: I0930 10:13:57.143296 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xngsm" event={"ID":"cce40d5f-6049-4330-84ed-6ae64304f30f","Type":"ContainerDied","Data":"180b4820c4b33dcab4b45ed8b6e4de9737bb91d54f028898a3429b854ff9b365"} Sep 30 10:13:59 crc kubenswrapper[4730]: I0930 10:13:59.162718 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xngsm" event={"ID":"cce40d5f-6049-4330-84ed-6ae64304f30f","Type":"ContainerStarted","Data":"4a637720464a36565e3f392b6732b2717fd4658c137b8178802508b373cabc2f"} Sep 30 10:14:00 crc kubenswrapper[4730]: I0930 10:14:00.174290 4730 generic.go:334] "Generic (PLEG): container finished" podID="cce40d5f-6049-4330-84ed-6ae64304f30f" containerID="4a637720464a36565e3f392b6732b2717fd4658c137b8178802508b373cabc2f" exitCode=0 Sep 30 10:14:00 crc kubenswrapper[4730]: I0930 10:14:00.174418 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xngsm" event={"ID":"cce40d5f-6049-4330-84ed-6ae64304f30f","Type":"ContainerDied","Data":"4a637720464a36565e3f392b6732b2717fd4658c137b8178802508b373cabc2f"} Sep 30 10:14:01 crc kubenswrapper[4730]: I0930 10:14:01.186900 4730 kubelet.go:2453] "SyncLoop 
Sep 30 10:14:01 crc kubenswrapper[4730]: I0930 10:14:01.186900 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xngsm" event={"ID":"cce40d5f-6049-4330-84ed-6ae64304f30f","Type":"ContainerStarted","Data":"a6d40710f396b2847478dbc9b6123a4ad532a658a8f02689de581a1b9103bdca"}
Sep 30 10:14:02 crc kubenswrapper[4730]: I0930 10:14:02.337228 4730 patch_prober.go:28] interesting pod/machine-config-daemon-d4zf9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 30 10:14:02 crc kubenswrapper[4730]: I0930 10:14:02.338168 4730 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 30 10:14:05 crc kubenswrapper[4730]: I0930 10:14:05.603715 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-xngsm"
Sep 30 10:14:05 crc kubenswrapper[4730]: I0930 10:14:05.604203 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-xngsm"
Sep 30 10:14:05 crc kubenswrapper[4730]: I0930 10:14:05.647332 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-xngsm"
Sep 30 10:14:05 crc kubenswrapper[4730]: I0930 10:14:05.675531 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-xngsm" podStartSLOduration=8.165270879 podStartE2EDuration="11.675505439s" podCreationTimestamp="2025-09-30 10:13:54 +0000 UTC" firstStartedPulling="2025-09-30 10:13:57.145438632 +0000 UTC m=+1481.478698625" lastFinishedPulling="2025-09-30 10:14:00.655673192 +0000 UTC m=+1484.988933185" observedRunningTime="2025-09-30 10:14:01.224100365 +0000 UTC m=+1485.557360358" watchObservedRunningTime="2025-09-30 10:14:05.675505439 +0000 UTC m=+1490.008765442"
Sep 30 10:14:06 crc kubenswrapper[4730]: I0930 10:14:06.280153 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-xngsm"
Sep 30 10:14:06 crc kubenswrapper[4730]: I0930 10:14:06.319221 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-xngsm"]
Sep 30 10:14:08 crc kubenswrapper[4730]: I0930 10:14:08.258109 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-xngsm" podUID="cce40d5f-6049-4330-84ed-6ae64304f30f" containerName="registry-server" containerID="cri-o://a6d40710f396b2847478dbc9b6123a4ad532a658a8f02689de581a1b9103bdca" gracePeriod=2
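A quick cross-check of the "Observed pod startup duration" numbers above: podStartE2EDuration is watchObservedRunningTime minus podCreationTimestamp, and podStartSLOduration is that minus the image-pull window. Illustrative arithmetic only, with the timestamps copied from the log entry:

package main

import (
	"fmt"
	"time"
)

func mustParse(s string) time.Time {
	// Layout matches the timestamps printed in the log entry above.
	t, err := time.Parse("2006-01-02 15:04:05.999999999 -0700 MST", s)
	if err != nil {
		panic(err)
	}
	return t
}

func main() {
	created := mustParse("2025-09-30 10:13:54 +0000 UTC")
	firstPull := mustParse("2025-09-30 10:13:57.145438632 +0000 UTC")
	lastPull := mustParse("2025-09-30 10:14:00.655673192 +0000 UTC")
	watchObserved := mustParse("2025-09-30 10:14:05.675505439 +0000 UTC")

	e2e := watchObserved.Sub(created)    // 11.675505439s = podStartE2EDuration
	slo := e2e - lastPull.Sub(firstPull) // 8.165270879s  = podStartSLOduration
	fmt.Println(e2e, slo)
}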
Sep 30 10:14:08 crc kubenswrapper[4730]: I0930 10:14:08.705449 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-xngsm"
Sep 30 10:14:08 crc kubenswrapper[4730]: I0930 10:14:08.852918 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cce40d5f-6049-4330-84ed-6ae64304f30f-utilities\") pod \"cce40d5f-6049-4330-84ed-6ae64304f30f\" (UID: \"cce40d5f-6049-4330-84ed-6ae64304f30f\") "
Sep 30 10:14:08 crc kubenswrapper[4730]: I0930 10:14:08.853070 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cce40d5f-6049-4330-84ed-6ae64304f30f-catalog-content\") pod \"cce40d5f-6049-4330-84ed-6ae64304f30f\" (UID: \"cce40d5f-6049-4330-84ed-6ae64304f30f\") "
Sep 30 10:14:08 crc kubenswrapper[4730]: I0930 10:14:08.853146 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rfwdb\" (UniqueName: \"kubernetes.io/projected/cce40d5f-6049-4330-84ed-6ae64304f30f-kube-api-access-rfwdb\") pod \"cce40d5f-6049-4330-84ed-6ae64304f30f\" (UID: \"cce40d5f-6049-4330-84ed-6ae64304f30f\") "
Sep 30 10:14:08 crc kubenswrapper[4730]: I0930 10:14:08.853989 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cce40d5f-6049-4330-84ed-6ae64304f30f-utilities" (OuterVolumeSpecName: "utilities") pod "cce40d5f-6049-4330-84ed-6ae64304f30f" (UID: "cce40d5f-6049-4330-84ed-6ae64304f30f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 10:14:08 crc kubenswrapper[4730]: I0930 10:14:08.858055 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cce40d5f-6049-4330-84ed-6ae64304f30f-kube-api-access-rfwdb" (OuterVolumeSpecName: "kube-api-access-rfwdb") pod "cce40d5f-6049-4330-84ed-6ae64304f30f" (UID: "cce40d5f-6049-4330-84ed-6ae64304f30f"). InnerVolumeSpecName "kube-api-access-rfwdb". PluginName "kubernetes.io/projected", VolumeGidValue ""
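"Killing container with a grace period" (gracePeriod=2 for the registry-server above, 600 later for the machine-config daemon) is the usual TERM-then-KILL pattern. A rough sketch under that assumption; plain Go, not kubelet source:

package main

import (
	"os/exec"
	"syscall"
	"time"
)

// killGracefully sends SIGTERM, waits up to the grace period, then SIGKILLs
// the process if it is still running.
func killGracefully(cmd *exec.Cmd, grace time.Duration) error {
	_ = cmd.Process.Signal(syscall.SIGTERM)
	done := make(chan error, 1)
	go func() { done <- cmd.Wait() }()
	select {
	case err := <-done:
		return err // exited within the grace period
	case <-time.After(grace):
		return cmd.Process.Kill() // SIGKILL after the grace period expires
	}
}

func main() {
	cmd := exec.Command("sleep", "30")
	if err := cmd.Start(); err != nil {
		panic(err)
	}
	_ = killGracefully(cmd, 2*time.Second)
}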
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 10:14:08 crc kubenswrapper[4730]: I0930 10:14:08.957290 4730 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cce40d5f-6049-4330-84ed-6ae64304f30f-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 10:14:08 crc kubenswrapper[4730]: I0930 10:14:08.957369 4730 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cce40d5f-6049-4330-84ed-6ae64304f30f-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 10:14:08 crc kubenswrapper[4730]: I0930 10:14:08.957393 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rfwdb\" (UniqueName: \"kubernetes.io/projected/cce40d5f-6049-4330-84ed-6ae64304f30f-kube-api-access-rfwdb\") on node \"crc\" DevicePath \"\"" Sep 30 10:14:09 crc kubenswrapper[4730]: I0930 10:14:09.269141 4730 generic.go:334] "Generic (PLEG): container finished" podID="cce40d5f-6049-4330-84ed-6ae64304f30f" containerID="a6d40710f396b2847478dbc9b6123a4ad532a658a8f02689de581a1b9103bdca" exitCode=0 Sep 30 10:14:09 crc kubenswrapper[4730]: I0930 10:14:09.269245 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-xngsm" Sep 30 10:14:09 crc kubenswrapper[4730]: I0930 10:14:09.269230 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xngsm" event={"ID":"cce40d5f-6049-4330-84ed-6ae64304f30f","Type":"ContainerDied","Data":"a6d40710f396b2847478dbc9b6123a4ad532a658a8f02689de581a1b9103bdca"} Sep 30 10:14:09 crc kubenswrapper[4730]: I0930 10:14:09.269444 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xngsm" event={"ID":"cce40d5f-6049-4330-84ed-6ae64304f30f","Type":"ContainerDied","Data":"86c6af59a78729a88c172bb42e55efed2d2e24ee15a36a45820546cfc89db981"} Sep 30 10:14:09 crc kubenswrapper[4730]: I0930 10:14:09.269462 4730 scope.go:117] "RemoveContainer" containerID="a6d40710f396b2847478dbc9b6123a4ad532a658a8f02689de581a1b9103bdca" Sep 30 10:14:09 crc kubenswrapper[4730]: I0930 10:14:09.297499 4730 scope.go:117] "RemoveContainer" containerID="4a637720464a36565e3f392b6732b2717fd4658c137b8178802508b373cabc2f" Sep 30 10:14:09 crc kubenswrapper[4730]: I0930 10:14:09.304207 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-xngsm"] Sep 30 10:14:09 crc kubenswrapper[4730]: I0930 10:14:09.314033 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-xngsm"] Sep 30 10:14:09 crc kubenswrapper[4730]: I0930 10:14:09.319060 4730 scope.go:117] "RemoveContainer" containerID="180b4820c4b33dcab4b45ed8b6e4de9737bb91d54f028898a3429b854ff9b365" Sep 30 10:14:09 crc kubenswrapper[4730]: I0930 10:14:09.371695 4730 scope.go:117] "RemoveContainer" containerID="a6d40710f396b2847478dbc9b6123a4ad532a658a8f02689de581a1b9103bdca" Sep 30 10:14:09 crc kubenswrapper[4730]: E0930 10:14:09.372846 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a6d40710f396b2847478dbc9b6123a4ad532a658a8f02689de581a1b9103bdca\": container with ID starting with a6d40710f396b2847478dbc9b6123a4ad532a658a8f02689de581a1b9103bdca not found: ID does not exist" containerID="a6d40710f396b2847478dbc9b6123a4ad532a658a8f02689de581a1b9103bdca" Sep 30 10:14:09 crc kubenswrapper[4730]: I0930 10:14:09.372895 
4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a6d40710f396b2847478dbc9b6123a4ad532a658a8f02689de581a1b9103bdca"} err="failed to get container status \"a6d40710f396b2847478dbc9b6123a4ad532a658a8f02689de581a1b9103bdca\": rpc error: code = NotFound desc = could not find container \"a6d40710f396b2847478dbc9b6123a4ad532a658a8f02689de581a1b9103bdca\": container with ID starting with a6d40710f396b2847478dbc9b6123a4ad532a658a8f02689de581a1b9103bdca not found: ID does not exist" Sep 30 10:14:09 crc kubenswrapper[4730]: I0930 10:14:09.372922 4730 scope.go:117] "RemoveContainer" containerID="4a637720464a36565e3f392b6732b2717fd4658c137b8178802508b373cabc2f" Sep 30 10:14:09 crc kubenswrapper[4730]: E0930 10:14:09.373253 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4a637720464a36565e3f392b6732b2717fd4658c137b8178802508b373cabc2f\": container with ID starting with 4a637720464a36565e3f392b6732b2717fd4658c137b8178802508b373cabc2f not found: ID does not exist" containerID="4a637720464a36565e3f392b6732b2717fd4658c137b8178802508b373cabc2f" Sep 30 10:14:09 crc kubenswrapper[4730]: I0930 10:14:09.373302 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4a637720464a36565e3f392b6732b2717fd4658c137b8178802508b373cabc2f"} err="failed to get container status \"4a637720464a36565e3f392b6732b2717fd4658c137b8178802508b373cabc2f\": rpc error: code = NotFound desc = could not find container \"4a637720464a36565e3f392b6732b2717fd4658c137b8178802508b373cabc2f\": container with ID starting with 4a637720464a36565e3f392b6732b2717fd4658c137b8178802508b373cabc2f not found: ID does not exist" Sep 30 10:14:09 crc kubenswrapper[4730]: I0930 10:14:09.373337 4730 scope.go:117] "RemoveContainer" containerID="180b4820c4b33dcab4b45ed8b6e4de9737bb91d54f028898a3429b854ff9b365" Sep 30 10:14:09 crc kubenswrapper[4730]: E0930 10:14:09.373675 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"180b4820c4b33dcab4b45ed8b6e4de9737bb91d54f028898a3429b854ff9b365\": container with ID starting with 180b4820c4b33dcab4b45ed8b6e4de9737bb91d54f028898a3429b854ff9b365 not found: ID does not exist" containerID="180b4820c4b33dcab4b45ed8b6e4de9737bb91d54f028898a3429b854ff9b365" Sep 30 10:14:09 crc kubenswrapper[4730]: I0930 10:14:09.373730 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"180b4820c4b33dcab4b45ed8b6e4de9737bb91d54f028898a3429b854ff9b365"} err="failed to get container status \"180b4820c4b33dcab4b45ed8b6e4de9737bb91d54f028898a3429b854ff9b365\": rpc error: code = NotFound desc = could not find container \"180b4820c4b33dcab4b45ed8b6e4de9737bb91d54f028898a3429b854ff9b365\": container with ID starting with 180b4820c4b33dcab4b45ed8b6e4de9737bb91d54f028898a3429b854ff9b365 not found: ID does not exist" Sep 30 10:14:10 crc kubenswrapper[4730]: I0930 10:14:10.393046 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cce40d5f-6049-4330-84ed-6ae64304f30f" path="/var/lib/kubelet/pods/cce40d5f-6049-4330-84ed-6ae64304f30f/volumes" Sep 30 10:14:32 crc kubenswrapper[4730]: I0930 10:14:32.336413 4730 patch_prober.go:28] interesting pod/machine-config-daemon-d4zf9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 
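The NotFound noise above is benign: the kubelet retries RemoveContainer for IDs that CRI-O has already deleted, and a NotFound response just confirms there is nothing left to remove. The usual idempotent-delete pattern, sketched with a stand-in error (illustrative only, not the CRI client):

package main

import (
	"errors"
	"fmt"
)

var errNotFound = errors.New("container not found")

// removeContainer stands in for the runtime's RemoveContainer call; here the
// runtime reports the container as already gone.
func removeContainer(id string) error {
	return fmt.Errorf("could not find container %q: %w", id, errNotFound)
}

func main() {
	if err := removeContainer("a6d40710f396"); errors.Is(err, errNotFound) {
		// Already removed: treat as success rather than retrying forever.
		fmt.Println("already removed, nothing to do:", err)
	}
}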
Sep 30 10:14:32 crc kubenswrapper[4730]: I0930 10:14:32.336413 4730 patch_prober.go:28] interesting pod/machine-config-daemon-d4zf9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 30 10:14:32 crc kubenswrapper[4730]: I0930 10:14:32.336959 4730 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 30 10:14:32 crc kubenswrapper[4730]: I0930 10:14:32.337008 4730 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9"
Sep 30 10:14:32 crc kubenswrapper[4730]: I0930 10:14:32.337772 4730 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"62dd4ca15c6a394c053301d28de02310227d72673a1c1548f40c218bf8c90c44"} pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Sep 30 10:14:32 crc kubenswrapper[4730]: I0930 10:14:32.337825 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerName="machine-config-daemon" containerID="cri-o://62dd4ca15c6a394c053301d28de02310227d72673a1c1548f40c218bf8c90c44" gracePeriod=600
Sep 30 10:14:33 crc kubenswrapper[4730]: E0930 10:14:33.019368 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327"
Sep 30 10:14:33 crc kubenswrapper[4730]: I0930 10:14:33.491844 4730 generic.go:334] "Generic (PLEG): container finished" podID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerID="62dd4ca15c6a394c053301d28de02310227d72673a1c1548f40c218bf8c90c44" exitCode=0
Sep 30 10:14:33 crc kubenswrapper[4730]: I0930 10:14:33.491905 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" event={"ID":"95bd4436-8399-478d-9552-c9ba5ae8f327","Type":"ContainerDied","Data":"62dd4ca15c6a394c053301d28de02310227d72673a1c1548f40c218bf8c90c44"}
Sep 30 10:14:33 crc kubenswrapper[4730]: I0930 10:14:33.491943 4730 scope.go:117] "RemoveContainer" containerID="d80d6bf84aad0f9e13029ef1a54a6e376ee3848702f4ba4ce0570e2a35ec8e0c"
Sep 30 10:14:33 crc kubenswrapper[4730]: I0930 10:14:33.492594 4730 scope.go:117] "RemoveContainer" containerID="62dd4ca15c6a394c053301d28de02310227d72673a1c1548f40c218bf8c90c44"
Sep 30 10:14:33 crc kubenswrapper[4730]: E0930 10:14:33.492840 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327"
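machine-config-daemon is now in CrashLoopBackOff; "back-off 5m0s" is the kubelet's restart backoff at its ceiling. Assuming the stock defaults (10s initial delay, doubling, 5m cap; not read from this cluster's config), the retry schedule looks like:

package main

import (
	"fmt"
	"time"
)

func main() {
	backoff := 10 * time.Second
	const max = 5 * time.Minute
	for i := 1; i <= 7; i++ {
		fmt.Printf("restart %d: wait %v\n", i, backoff)
		backoff *= 2 // exponential backoff between restarts
		if backoff > max {
			backoff = max // clamps at 5m0s, matching the message above
		}
	}
}

After the sixth restart every further attempt waits the full 5m0s, which is why the same "back-off 5m0s" error repeats for the rest of this log.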
containerID="62dd4ca15c6a394c053301d28de02310227d72673a1c1548f40c218bf8c90c44" Sep 30 10:14:47 crc kubenswrapper[4730]: E0930 10:14:47.382665 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 10:15:00 crc kubenswrapper[4730]: I0930 10:15:00.149237 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29320455-5kztb"] Sep 30 10:15:00 crc kubenswrapper[4730]: E0930 10:15:00.150346 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cce40d5f-6049-4330-84ed-6ae64304f30f" containerName="extract-utilities" Sep 30 10:15:00 crc kubenswrapper[4730]: I0930 10:15:00.150367 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="cce40d5f-6049-4330-84ed-6ae64304f30f" containerName="extract-utilities" Sep 30 10:15:00 crc kubenswrapper[4730]: E0930 10:15:00.150378 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cce40d5f-6049-4330-84ed-6ae64304f30f" containerName="extract-content" Sep 30 10:15:00 crc kubenswrapper[4730]: I0930 10:15:00.150386 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="cce40d5f-6049-4330-84ed-6ae64304f30f" containerName="extract-content" Sep 30 10:15:00 crc kubenswrapper[4730]: E0930 10:15:00.150427 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cce40d5f-6049-4330-84ed-6ae64304f30f" containerName="registry-server" Sep 30 10:15:00 crc kubenswrapper[4730]: I0930 10:15:00.150436 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="cce40d5f-6049-4330-84ed-6ae64304f30f" containerName="registry-server" Sep 30 10:15:00 crc kubenswrapper[4730]: I0930 10:15:00.150654 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="cce40d5f-6049-4330-84ed-6ae64304f30f" containerName="registry-server" Sep 30 10:15:00 crc kubenswrapper[4730]: I0930 10:15:00.151423 4730 util.go:30] "No sandbox for pod can be found. 
Sep 30 10:15:00 crc kubenswrapper[4730]: I0930 10:15:00.151423 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29320455-5kztb"
Sep 30 10:15:00 crc kubenswrapper[4730]: I0930 10:15:00.155063 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Sep 30 10:15:00 crc kubenswrapper[4730]: I0930 10:15:00.155072 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Sep 30 10:15:00 crc kubenswrapper[4730]: I0930 10:15:00.158025 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29320455-5kztb"]
Sep 30 10:15:00 crc kubenswrapper[4730]: I0930 10:15:00.232113 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pvfvr\" (UniqueName: \"kubernetes.io/projected/fcbfe8d1-fee7-4f4a-92ab-ce604a005970-kube-api-access-pvfvr\") pod \"collect-profiles-29320455-5kztb\" (UID: \"fcbfe8d1-fee7-4f4a-92ab-ce604a005970\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320455-5kztb"
Sep 30 10:15:00 crc kubenswrapper[4730]: I0930 10:15:00.232341 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/fcbfe8d1-fee7-4f4a-92ab-ce604a005970-secret-volume\") pod \"collect-profiles-29320455-5kztb\" (UID: \"fcbfe8d1-fee7-4f4a-92ab-ce604a005970\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320455-5kztb"
Sep 30 10:15:00 crc kubenswrapper[4730]: I0930 10:15:00.232658 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/fcbfe8d1-fee7-4f4a-92ab-ce604a005970-config-volume\") pod \"collect-profiles-29320455-5kztb\" (UID: \"fcbfe8d1-fee7-4f4a-92ab-ce604a005970\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320455-5kztb"
Sep 30 10:15:00 crc kubenswrapper[4730]: I0930 10:15:00.335027 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/fcbfe8d1-fee7-4f4a-92ab-ce604a005970-secret-volume\") pod \"collect-profiles-29320455-5kztb\" (UID: \"fcbfe8d1-fee7-4f4a-92ab-ce604a005970\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320455-5kztb"
Sep 30 10:15:00 crc kubenswrapper[4730]: I0930 10:15:00.335197 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/fcbfe8d1-fee7-4f4a-92ab-ce604a005970-config-volume\") pod \"collect-profiles-29320455-5kztb\" (UID: \"fcbfe8d1-fee7-4f4a-92ab-ce604a005970\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320455-5kztb"
Sep 30 10:15:00 crc kubenswrapper[4730]: I0930 10:15:00.335386 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pvfvr\" (UniqueName: \"kubernetes.io/projected/fcbfe8d1-fee7-4f4a-92ab-ce604a005970-kube-api-access-pvfvr\") pod \"collect-profiles-29320455-5kztb\" (UID: \"fcbfe8d1-fee7-4f4a-92ab-ce604a005970\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320455-5kztb"
Sep 30 10:15:00 crc kubenswrapper[4730]: I0930 10:15:00.336317 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/fcbfe8d1-fee7-4f4a-92ab-ce604a005970-config-volume\") pod \"collect-profiles-29320455-5kztb\" (UID: \"fcbfe8d1-fee7-4f4a-92ab-ce604a005970\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320455-5kztb"
Sep 30 10:15:00 crc kubenswrapper[4730]: I0930 10:15:00.342320 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/fcbfe8d1-fee7-4f4a-92ab-ce604a005970-secret-volume\") pod \"collect-profiles-29320455-5kztb\" (UID: \"fcbfe8d1-fee7-4f4a-92ab-ce604a005970\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320455-5kztb"
Sep 30 10:15:00 crc kubenswrapper[4730]: I0930 10:15:00.351821 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pvfvr\" (UniqueName: \"kubernetes.io/projected/fcbfe8d1-fee7-4f4a-92ab-ce604a005970-kube-api-access-pvfvr\") pod \"collect-profiles-29320455-5kztb\" (UID: \"fcbfe8d1-fee7-4f4a-92ab-ce604a005970\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320455-5kztb"
Sep 30 10:15:00 crc kubenswrapper[4730]: I0930 10:15:00.504516 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29320455-5kztb"
Sep 30 10:15:00 crc kubenswrapper[4730]: I0930 10:15:00.940206 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29320455-5kztb"]
Sep 30 10:15:01 crc kubenswrapper[4730]: I0930 10:15:01.755146 4730 generic.go:334] "Generic (PLEG): container finished" podID="fcbfe8d1-fee7-4f4a-92ab-ce604a005970" containerID="06593d3f54df33993be4326aa4cac6ec6ee31030083c90bae682bdaf2e492751" exitCode=0
Sep 30 10:15:01 crc kubenswrapper[4730]: I0930 10:15:01.756146 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29320455-5kztb" event={"ID":"fcbfe8d1-fee7-4f4a-92ab-ce604a005970","Type":"ContainerDied","Data":"06593d3f54df33993be4326aa4cac6ec6ee31030083c90bae682bdaf2e492751"}
Sep 30 10:15:01 crc kubenswrapper[4730]: I0930 10:15:01.756441 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29320455-5kztb" event={"ID":"fcbfe8d1-fee7-4f4a-92ab-ce604a005970","Type":"ContainerStarted","Data":"7c23db6fe7fb4f69cc1b60991d9c73ec2b9ad60a413a49dc07dff131b29d0966"}
Sep 30 10:15:02 crc kubenswrapper[4730]: I0930 10:15:02.381430 4730 scope.go:117] "RemoveContainer" containerID="62dd4ca15c6a394c053301d28de02310227d72673a1c1548f40c218bf8c90c44"
Sep 30 10:15:02 crc kubenswrapper[4730]: E0930 10:15:02.381709 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327"
Sep 30 10:15:03 crc kubenswrapper[4730]: I0930 10:15:03.103871 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29320455-5kztb"
Sep 30 10:15:03 crc kubenswrapper[4730]: I0930 10:15:03.190946 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pvfvr\" (UniqueName: \"kubernetes.io/projected/fcbfe8d1-fee7-4f4a-92ab-ce604a005970-kube-api-access-pvfvr\") pod \"fcbfe8d1-fee7-4f4a-92ab-ce604a005970\" (UID: \"fcbfe8d1-fee7-4f4a-92ab-ce604a005970\") "
Sep 30 10:15:03 crc kubenswrapper[4730]: I0930 10:15:03.191139 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/fcbfe8d1-fee7-4f4a-92ab-ce604a005970-config-volume\") pod \"fcbfe8d1-fee7-4f4a-92ab-ce604a005970\" (UID: \"fcbfe8d1-fee7-4f4a-92ab-ce604a005970\") "
Sep 30 10:15:03 crc kubenswrapper[4730]: I0930 10:15:03.191195 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/fcbfe8d1-fee7-4f4a-92ab-ce604a005970-secret-volume\") pod \"fcbfe8d1-fee7-4f4a-92ab-ce604a005970\" (UID: \"fcbfe8d1-fee7-4f4a-92ab-ce604a005970\") "
Sep 30 10:15:03 crc kubenswrapper[4730]: I0930 10:15:03.191664 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fcbfe8d1-fee7-4f4a-92ab-ce604a005970-config-volume" (OuterVolumeSpecName: "config-volume") pod "fcbfe8d1-fee7-4f4a-92ab-ce604a005970" (UID: "fcbfe8d1-fee7-4f4a-92ab-ce604a005970"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 10:15:03 crc kubenswrapper[4730]: I0930 10:15:03.191864 4730 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/fcbfe8d1-fee7-4f4a-92ab-ce604a005970-config-volume\") on node \"crc\" DevicePath \"\""
Sep 30 10:15:03 crc kubenswrapper[4730]: I0930 10:15:03.198820 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fcbfe8d1-fee7-4f4a-92ab-ce604a005970-kube-api-access-pvfvr" (OuterVolumeSpecName: "kube-api-access-pvfvr") pod "fcbfe8d1-fee7-4f4a-92ab-ce604a005970" (UID: "fcbfe8d1-fee7-4f4a-92ab-ce604a005970"). InnerVolumeSpecName "kube-api-access-pvfvr". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 10:15:03 crc kubenswrapper[4730]: I0930 10:15:03.198995 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fcbfe8d1-fee7-4f4a-92ab-ce604a005970-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "fcbfe8d1-fee7-4f4a-92ab-ce604a005970" (UID: "fcbfe8d1-fee7-4f4a-92ab-ce604a005970"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 10:15:03 crc kubenswrapper[4730]: I0930 10:15:03.294109 4730 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/fcbfe8d1-fee7-4f4a-92ab-ce604a005970-secret-volume\") on node \"crc\" DevicePath \"\""
Sep 30 10:15:03 crc kubenswrapper[4730]: I0930 10:15:03.294150 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pvfvr\" (UniqueName: \"kubernetes.io/projected/fcbfe8d1-fee7-4f4a-92ab-ce604a005970-kube-api-access-pvfvr\") on node \"crc\" DevicePath \"\""
Sep 30 10:15:03 crc kubenswrapper[4730]: I0930 10:15:03.777784 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29320455-5kztb" event={"ID":"fcbfe8d1-fee7-4f4a-92ab-ce604a005970","Type":"ContainerDied","Data":"7c23db6fe7fb4f69cc1b60991d9c73ec2b9ad60a413a49dc07dff131b29d0966"}
Sep 30 10:15:03 crc kubenswrapper[4730]: I0930 10:15:03.778304 4730 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7c23db6fe7fb4f69cc1b60991d9c73ec2b9ad60a413a49dc07dff131b29d0966"
Sep 30 10:15:03 crc kubenswrapper[4730]: I0930 10:15:03.777880 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29320455-5kztb"
Sep 30 10:15:04 crc kubenswrapper[4730]: I0930 10:15:04.788203 4730 generic.go:334] "Generic (PLEG): container finished" podID="94e7b621-95ed-40de-bf21-f3398f10bace" containerID="e07ea6d734f17ae651a76201ba3c6c4296a3176aa665456a828480cc6a95e800" exitCode=0
Sep 30 10:15:04 crc kubenswrapper[4730]: I0930 10:15:04.788249 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-nkxlk" event={"ID":"94e7b621-95ed-40de-bf21-f3398f10bace","Type":"ContainerDied","Data":"e07ea6d734f17ae651a76201ba3c6c4296a3176aa665456a828480cc6a95e800"}
Sep 30 10:15:06 crc kubenswrapper[4730]: I0930 10:15:06.217286 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-nkxlk"
Sep 30 10:15:06 crc kubenswrapper[4730]: I0930 10:15:06.245915 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/94e7b621-95ed-40de-bf21-f3398f10bace-ssh-key\") pod \"94e7b621-95ed-40de-bf21-f3398f10bace\" (UID: \"94e7b621-95ed-40de-bf21-f3398f10bace\") "
Sep 30 10:15:06 crc kubenswrapper[4730]: I0930 10:15:06.246052 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94e7b621-95ed-40de-bf21-f3398f10bace-bootstrap-combined-ca-bundle\") pod \"94e7b621-95ed-40de-bf21-f3398f10bace\" (UID: \"94e7b621-95ed-40de-bf21-f3398f10bace\") "
Sep 30 10:15:06 crc kubenswrapper[4730]: I0930 10:15:06.246090 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/94e7b621-95ed-40de-bf21-f3398f10bace-inventory\") pod \"94e7b621-95ed-40de-bf21-f3398f10bace\" (UID: \"94e7b621-95ed-40de-bf21-f3398f10bace\") "
Sep 30 10:15:06 crc kubenswrapper[4730]: I0930 10:15:06.246133 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s8cq4\" (UniqueName: \"kubernetes.io/projected/94e7b621-95ed-40de-bf21-f3398f10bace-kube-api-access-s8cq4\") pod \"94e7b621-95ed-40de-bf21-f3398f10bace\" (UID: \"94e7b621-95ed-40de-bf21-f3398f10bace\") "
Sep 30 10:15:06 crc kubenswrapper[4730]: I0930 10:15:06.252113 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/94e7b621-95ed-40de-bf21-f3398f10bace-kube-api-access-s8cq4" (OuterVolumeSpecName: "kube-api-access-s8cq4") pod "94e7b621-95ed-40de-bf21-f3398f10bace" (UID: "94e7b621-95ed-40de-bf21-f3398f10bace"). InnerVolumeSpecName "kube-api-access-s8cq4". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 10:15:06 crc kubenswrapper[4730]: I0930 10:15:06.253105 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/94e7b621-95ed-40de-bf21-f3398f10bace-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "94e7b621-95ed-40de-bf21-f3398f10bace" (UID: "94e7b621-95ed-40de-bf21-f3398f10bace"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 10:15:06 crc kubenswrapper[4730]: I0930 10:15:06.274862 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/94e7b621-95ed-40de-bf21-f3398f10bace-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "94e7b621-95ed-40de-bf21-f3398f10bace" (UID: "94e7b621-95ed-40de-bf21-f3398f10bace"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 10:15:06 crc kubenswrapper[4730]: I0930 10:15:06.275637 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/94e7b621-95ed-40de-bf21-f3398f10bace-inventory" (OuterVolumeSpecName: "inventory") pod "94e7b621-95ed-40de-bf21-f3398f10bace" (UID: "94e7b621-95ed-40de-bf21-f3398f10bace"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 10:15:06 crc kubenswrapper[4730]: I0930 10:15:06.348457 4730 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94e7b621-95ed-40de-bf21-f3398f10bace-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 30 10:15:06 crc kubenswrapper[4730]: I0930 10:15:06.348866 4730 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/94e7b621-95ed-40de-bf21-f3398f10bace-inventory\") on node \"crc\" DevicePath \"\""
Sep 30 10:15:06 crc kubenswrapper[4730]: I0930 10:15:06.348880 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s8cq4\" (UniqueName: \"kubernetes.io/projected/94e7b621-95ed-40de-bf21-f3398f10bace-kube-api-access-s8cq4\") on node \"crc\" DevicePath \"\""
Sep 30 10:15:06 crc kubenswrapper[4730]: I0930 10:15:06.348892 4730 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/94e7b621-95ed-40de-bf21-f3398f10bace-ssh-key\") on node \"crc\" DevicePath \"\""
Sep 30 10:15:06 crc kubenswrapper[4730]: I0930 10:15:06.812145 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-nkxlk" event={"ID":"94e7b621-95ed-40de-bf21-f3398f10bace","Type":"ContainerDied","Data":"f585faedc3645366ff6e00d59c2e07fcfef7b3e1ab7ecb07cf585b78a64131bb"}
Sep 30 10:15:06 crc kubenswrapper[4730]: I0930 10:15:06.812205 4730 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f585faedc3645366ff6e00d59c2e07fcfef7b3e1ab7ecb07cf585b78a64131bb"
Sep 30 10:15:06 crc kubenswrapper[4730]: I0930 10:15:06.812211 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-nkxlk"
Sep 30 10:15:06 crc kubenswrapper[4730]: I0930 10:15:06.896336 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-pt2wc"]
Sep 30 10:15:06 crc kubenswrapper[4730]: E0930 10:15:06.896831 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fcbfe8d1-fee7-4f4a-92ab-ce604a005970" containerName="collect-profiles"
Sep 30 10:15:06 crc kubenswrapper[4730]: I0930 10:15:06.896854 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="fcbfe8d1-fee7-4f4a-92ab-ce604a005970" containerName="collect-profiles"
Sep 30 10:15:06 crc kubenswrapper[4730]: E0930 10:15:06.896872 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="94e7b621-95ed-40de-bf21-f3398f10bace" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam"
Sep 30 10:15:06 crc kubenswrapper[4730]: I0930 10:15:06.896880 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="94e7b621-95ed-40de-bf21-f3398f10bace" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam"
Sep 30 10:15:06 crc kubenswrapper[4730]: I0930 10:15:06.897082 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="fcbfe8d1-fee7-4f4a-92ab-ce604a005970" containerName="collect-profiles"
Sep 30 10:15:06 crc kubenswrapper[4730]: I0930 10:15:06.897107 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="94e7b621-95ed-40de-bf21-f3398f10bace" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam"
Sep 30 10:15:06 crc kubenswrapper[4730]: I0930 10:15:06.898304 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-pt2wc"
Sep 30 10:15:06 crc kubenswrapper[4730]: I0930 10:15:06.901437 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Sep 30 10:15:06 crc kubenswrapper[4730]: I0930 10:15:06.901663 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Sep 30 10:15:06 crc kubenswrapper[4730]: I0930 10:15:06.901891 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-8vdlt"
Sep 30 10:15:06 crc kubenswrapper[4730]: I0930 10:15:06.902066 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Sep 30 10:15:06 crc kubenswrapper[4730]: I0930 10:15:06.920928 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-pt2wc"]
Sep 30 10:15:06 crc kubenswrapper[4730]: I0930 10:15:06.964123 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/573673c2-6d89-478c-bcae-c6a1b77e0c94-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-pt2wc\" (UID: \"573673c2-6d89-478c-bcae-c6a1b77e0c94\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-pt2wc"
Sep 30 10:15:06 crc kubenswrapper[4730]: I0930 10:15:06.964395 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/573673c2-6d89-478c-bcae-c6a1b77e0c94-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-pt2wc\" (UID: \"573673c2-6d89-478c-bcae-c6a1b77e0c94\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-pt2wc"
Sep 30 10:15:06 crc kubenswrapper[4730]: I0930 10:15:06.964500 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-227lg\" (UniqueName: \"kubernetes.io/projected/573673c2-6d89-478c-bcae-c6a1b77e0c94-kube-api-access-227lg\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-pt2wc\" (UID: \"573673c2-6d89-478c-bcae-c6a1b77e0c94\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-pt2wc"
Sep 30 10:15:07 crc kubenswrapper[4730]: I0930 10:15:07.066171 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-227lg\" (UniqueName: \"kubernetes.io/projected/573673c2-6d89-478c-bcae-c6a1b77e0c94-kube-api-access-227lg\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-pt2wc\" (UID: \"573673c2-6d89-478c-bcae-c6a1b77e0c94\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-pt2wc"
Sep 30 10:15:07 crc kubenswrapper[4730]: I0930 10:15:07.066308 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/573673c2-6d89-478c-bcae-c6a1b77e0c94-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-pt2wc\" (UID: \"573673c2-6d89-478c-bcae-c6a1b77e0c94\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-pt2wc"
Sep 30 10:15:07 crc kubenswrapper[4730]: I0930 10:15:07.066446 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/573673c2-6d89-478c-bcae-c6a1b77e0c94-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-pt2wc\" (UID: \"573673c2-6d89-478c-bcae-c6a1b77e0c94\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-pt2wc"
Sep 30 10:15:07 crc kubenswrapper[4730]: I0930 10:15:07.070869 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/573673c2-6d89-478c-bcae-c6a1b77e0c94-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-pt2wc\" (UID: \"573673c2-6d89-478c-bcae-c6a1b77e0c94\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-pt2wc"
Sep 30 10:15:07 crc kubenswrapper[4730]: I0930 10:15:07.080231 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/573673c2-6d89-478c-bcae-c6a1b77e0c94-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-pt2wc\" (UID: \"573673c2-6d89-478c-bcae-c6a1b77e0c94\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-pt2wc"
Sep 30 10:15:07 crc kubenswrapper[4730]: I0930 10:15:07.083187 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-227lg\" (UniqueName: \"kubernetes.io/projected/573673c2-6d89-478c-bcae-c6a1b77e0c94-kube-api-access-227lg\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-pt2wc\" (UID: \"573673c2-6d89-478c-bcae-c6a1b77e0c94\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-pt2wc"
Sep 30 10:15:07 crc kubenswrapper[4730]: I0930 10:15:07.224147 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-pt2wc"
Sep 30 10:15:07 crc kubenswrapper[4730]: I0930 10:15:07.702868 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-pt2wc"]
Sep 30 10:15:07 crc kubenswrapper[4730]: W0930 10:15:07.704298 4730 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod573673c2_6d89_478c_bcae_c6a1b77e0c94.slice/crio-2d27717d37fb221d31e6ca39216b0e68fdb5a93196b39a3b22308580a00bb6f7 WatchSource:0}: Error finding container 2d27717d37fb221d31e6ca39216b0e68fdb5a93196b39a3b22308580a00bb6f7: Status 404 returned error can't find the container with id 2d27717d37fb221d31e6ca39216b0e68fdb5a93196b39a3b22308580a00bb6f7
Sep 30 10:15:07 crc kubenswrapper[4730]: I0930 10:15:07.706625 4730 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Sep 30 10:15:07 crc kubenswrapper[4730]: I0930 10:15:07.822952 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-pt2wc" event={"ID":"573673c2-6d89-478c-bcae-c6a1b77e0c94","Type":"ContainerStarted","Data":"2d27717d37fb221d31e6ca39216b0e68fdb5a93196b39a3b22308580a00bb6f7"}
Sep 30 10:15:08 crc kubenswrapper[4730]: I0930 10:15:08.833204 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-pt2wc" event={"ID":"573673c2-6d89-478c-bcae-c6a1b77e0c94","Type":"ContainerStarted","Data":"24fa7aae52a8f9e3dcade1ba505d869de59aedb4b1dd755a3f0cb7e9b28ea0ed"}
Sep 30 10:15:09 crc kubenswrapper[4730]: I0930 10:15:09.874257 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-pt2wc" podStartSLOduration=3.029694828 podStartE2EDuration="3.874240041s" podCreationTimestamp="2025-09-30 10:15:06 +0000 UTC" firstStartedPulling="2025-09-30 10:15:07.706376439 +0000 UTC m=+1552.039636432" lastFinishedPulling="2025-09-30 10:15:08.550921652 +0000 UTC m=+1552.884181645" observedRunningTime="2025-09-30 10:15:09.862278367 +0000 UTC m=+1554.195538360" watchObservedRunningTime="2025-09-30 10:15:09.874240041 +0000 UTC m=+1554.207500044"
Sep 30 10:15:13 crc kubenswrapper[4730]: I0930 10:15:13.381586 4730 scope.go:117] "RemoveContainer" containerID="62dd4ca15c6a394c053301d28de02310227d72673a1c1548f40c218bf8c90c44"
Sep 30 10:15:13 crc kubenswrapper[4730]: E0930 10:15:13.382405 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327"
Sep 30 10:15:28 crc kubenswrapper[4730]: I0930 10:15:28.380857 4730 scope.go:117] "RemoveContainer" containerID="62dd4ca15c6a394c053301d28de02310227d72673a1c1548f40c218bf8c90c44"
Sep 30 10:15:28 crc kubenswrapper[4730]: E0930 10:15:28.381833 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327"
Sep 30 10:15:42 crc kubenswrapper[4730]: I0930 10:15:42.381303 4730 scope.go:117] "RemoveContainer" containerID="62dd4ca15c6a394c053301d28de02310227d72673a1c1548f40c218bf8c90c44"
Sep 30 10:15:42 crc kubenswrapper[4730]: E0930 10:15:42.381987 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327"
Sep 30 10:15:53 crc kubenswrapper[4730]: I0930 10:15:53.380916 4730 scope.go:117] "RemoveContainer" containerID="62dd4ca15c6a394c053301d28de02310227d72673a1c1548f40c218bf8c90c44"
Sep 30 10:15:53 crc kubenswrapper[4730]: E0930 10:15:53.381541 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327"
Sep 30 10:16:08 crc kubenswrapper[4730]: I0930 10:16:08.382087 4730 scope.go:117] "RemoveContainer" containerID="62dd4ca15c6a394c053301d28de02310227d72673a1c1548f40c218bf8c90c44"
Sep 30 10:16:08 crc kubenswrapper[4730]: E0930 10:16:08.382812 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327"
Sep 30 10:16:17 crc kubenswrapper[4730]: I0930 10:16:17.429728 4730 generic.go:334] "Generic (PLEG): container finished" podID="573673c2-6d89-478c-bcae-c6a1b77e0c94" containerID="24fa7aae52a8f9e3dcade1ba505d869de59aedb4b1dd755a3f0cb7e9b28ea0ed" exitCode=0
Sep 30 10:16:17 crc kubenswrapper[4730]: I0930 10:16:17.429787 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-pt2wc" event={"ID":"573673c2-6d89-478c-bcae-c6a1b77e0c94","Type":"ContainerDied","Data":"24fa7aae52a8f9e3dcade1ba505d869de59aedb4b1dd755a3f0cb7e9b28ea0ed"}
Sep 30 10:16:18 crc kubenswrapper[4730]: I0930 10:16:18.873099 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-pt2wc"
Sep 30 10:16:19 crc kubenswrapper[4730]: I0930 10:16:19.014473 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-227lg\" (UniqueName: \"kubernetes.io/projected/573673c2-6d89-478c-bcae-c6a1b77e0c94-kube-api-access-227lg\") pod \"573673c2-6d89-478c-bcae-c6a1b77e0c94\" (UID: \"573673c2-6d89-478c-bcae-c6a1b77e0c94\") "
Sep 30 10:16:19 crc kubenswrapper[4730]: I0930 10:16:19.014563 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/573673c2-6d89-478c-bcae-c6a1b77e0c94-inventory\") pod \"573673c2-6d89-478c-bcae-c6a1b77e0c94\" (UID: \"573673c2-6d89-478c-bcae-c6a1b77e0c94\") "
Sep 30 10:16:19 crc kubenswrapper[4730]: I0930 10:16:19.014642 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/573673c2-6d89-478c-bcae-c6a1b77e0c94-ssh-key\") pod \"573673c2-6d89-478c-bcae-c6a1b77e0c94\" (UID: \"573673c2-6d89-478c-bcae-c6a1b77e0c94\") "
Sep 30 10:16:19 crc kubenswrapper[4730]: I0930 10:16:19.019866 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/573673c2-6d89-478c-bcae-c6a1b77e0c94-kube-api-access-227lg" (OuterVolumeSpecName: "kube-api-access-227lg") pod "573673c2-6d89-478c-bcae-c6a1b77e0c94" (UID: "573673c2-6d89-478c-bcae-c6a1b77e0c94"). InnerVolumeSpecName "kube-api-access-227lg". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 10:16:19 crc kubenswrapper[4730]: I0930 10:16:19.042812 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/573673c2-6d89-478c-bcae-c6a1b77e0c94-inventory" (OuterVolumeSpecName: "inventory") pod "573673c2-6d89-478c-bcae-c6a1b77e0c94" (UID: "573673c2-6d89-478c-bcae-c6a1b77e0c94"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 10:16:19 crc kubenswrapper[4730]: I0930 10:16:19.046321 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/573673c2-6d89-478c-bcae-c6a1b77e0c94-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "573673c2-6d89-478c-bcae-c6a1b77e0c94" (UID: "573673c2-6d89-478c-bcae-c6a1b77e0c94"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 10:16:19 crc kubenswrapper[4730]: I0930 10:16:19.116562 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-227lg\" (UniqueName: \"kubernetes.io/projected/573673c2-6d89-478c-bcae-c6a1b77e0c94-kube-api-access-227lg\") on node \"crc\" DevicePath \"\""
Sep 30 10:16:19 crc kubenswrapper[4730]: I0930 10:16:19.116594 4730 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/573673c2-6d89-478c-bcae-c6a1b77e0c94-inventory\") on node \"crc\" DevicePath \"\""
Sep 30 10:16:19 crc kubenswrapper[4730]: I0930 10:16:19.116603 4730 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/573673c2-6d89-478c-bcae-c6a1b77e0c94-ssh-key\") on node \"crc\" DevicePath \"\""
Sep 30 10:16:19 crc kubenswrapper[4730]: I0930 10:16:19.381195 4730 scope.go:117] "RemoveContainer" containerID="62dd4ca15c6a394c053301d28de02310227d72673a1c1548f40c218bf8c90c44"
Sep 30 10:16:19 crc kubenswrapper[4730]: E0930 10:16:19.381513 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327"
Sep 30 10:16:19 crc kubenswrapper[4730]: I0930 10:16:19.452830 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-pt2wc" event={"ID":"573673c2-6d89-478c-bcae-c6a1b77e0c94","Type":"ContainerDied","Data":"2d27717d37fb221d31e6ca39216b0e68fdb5a93196b39a3b22308580a00bb6f7"}
Sep 30 10:16:19 crc kubenswrapper[4730]: I0930 10:16:19.452871 4730 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2d27717d37fb221d31e6ca39216b0e68fdb5a93196b39a3b22308580a00bb6f7"
Sep 30 10:16:19 crc kubenswrapper[4730]: I0930 10:16:19.452878 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-pt2wc"
Sep 30 10:16:19 crc kubenswrapper[4730]: I0930 10:16:19.534091 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-bjwh2"]
Sep 30 10:16:19 crc kubenswrapper[4730]: E0930 10:16:19.534576 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="573673c2-6d89-478c-bcae-c6a1b77e0c94" containerName="configure-network-edpm-deployment-openstack-edpm-ipam"
Sep 30 10:16:19 crc kubenswrapper[4730]: I0930 10:16:19.534603 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="573673c2-6d89-478c-bcae-c6a1b77e0c94" containerName="configure-network-edpm-deployment-openstack-edpm-ipam"
Sep 30 10:16:19 crc kubenswrapper[4730]: I0930 10:16:19.534878 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="573673c2-6d89-478c-bcae-c6a1b77e0c94" containerName="configure-network-edpm-deployment-openstack-edpm-ipam"
Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-bjwh2" Sep 30 10:16:19 crc kubenswrapper[4730]: I0930 10:16:19.540913 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 30 10:16:19 crc kubenswrapper[4730]: I0930 10:16:19.541165 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 30 10:16:19 crc kubenswrapper[4730]: I0930 10:16:19.541211 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 10:16:19 crc kubenswrapper[4730]: I0930 10:16:19.541257 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-8vdlt" Sep 30 10:16:19 crc kubenswrapper[4730]: I0930 10:16:19.547558 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-bjwh2"] Sep 30 10:16:19 crc kubenswrapper[4730]: I0930 10:16:19.626732 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/27e64b2d-13ec-4303-9468-86b81dd4a2d0-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-bjwh2\" (UID: \"27e64b2d-13ec-4303-9468-86b81dd4a2d0\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-bjwh2" Sep 30 10:16:19 crc kubenswrapper[4730]: I0930 10:16:19.627084 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/27e64b2d-13ec-4303-9468-86b81dd4a2d0-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-bjwh2\" (UID: \"27e64b2d-13ec-4303-9468-86b81dd4a2d0\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-bjwh2" Sep 30 10:16:19 crc kubenswrapper[4730]: I0930 10:16:19.627312 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xt4ck\" (UniqueName: \"kubernetes.io/projected/27e64b2d-13ec-4303-9468-86b81dd4a2d0-kube-api-access-xt4ck\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-bjwh2\" (UID: \"27e64b2d-13ec-4303-9468-86b81dd4a2d0\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-bjwh2" Sep 30 10:16:19 crc kubenswrapper[4730]: I0930 10:16:19.728716 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/27e64b2d-13ec-4303-9468-86b81dd4a2d0-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-bjwh2\" (UID: \"27e64b2d-13ec-4303-9468-86b81dd4a2d0\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-bjwh2" Sep 30 10:16:19 crc kubenswrapper[4730]: I0930 10:16:19.728880 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/27e64b2d-13ec-4303-9468-86b81dd4a2d0-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-bjwh2\" (UID: \"27e64b2d-13ec-4303-9468-86b81dd4a2d0\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-bjwh2" Sep 30 10:16:19 crc kubenswrapper[4730]: I0930 10:16:19.728981 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xt4ck\" (UniqueName: \"kubernetes.io/projected/27e64b2d-13ec-4303-9468-86b81dd4a2d0-kube-api-access-xt4ck\") pod 
\"validate-network-edpm-deployment-openstack-edpm-ipam-bjwh2\" (UID: \"27e64b2d-13ec-4303-9468-86b81dd4a2d0\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-bjwh2" Sep 30 10:16:19 crc kubenswrapper[4730]: I0930 10:16:19.732260 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/27e64b2d-13ec-4303-9468-86b81dd4a2d0-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-bjwh2\" (UID: \"27e64b2d-13ec-4303-9468-86b81dd4a2d0\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-bjwh2" Sep 30 10:16:19 crc kubenswrapper[4730]: I0930 10:16:19.732360 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/27e64b2d-13ec-4303-9468-86b81dd4a2d0-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-bjwh2\" (UID: \"27e64b2d-13ec-4303-9468-86b81dd4a2d0\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-bjwh2" Sep 30 10:16:19 crc kubenswrapper[4730]: I0930 10:16:19.747860 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xt4ck\" (UniqueName: \"kubernetes.io/projected/27e64b2d-13ec-4303-9468-86b81dd4a2d0-kube-api-access-xt4ck\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-bjwh2\" (UID: \"27e64b2d-13ec-4303-9468-86b81dd4a2d0\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-bjwh2" Sep 30 10:16:19 crc kubenswrapper[4730]: I0930 10:16:19.852446 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-bjwh2" Sep 30 10:16:20 crc kubenswrapper[4730]: I0930 10:16:20.407445 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-bjwh2"] Sep 30 10:16:20 crc kubenswrapper[4730]: I0930 10:16:20.462149 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-bjwh2" event={"ID":"27e64b2d-13ec-4303-9468-86b81dd4a2d0","Type":"ContainerStarted","Data":"cdedd6b0cc7fc44a31bd0b2eca59753d207903c792a9ddbcbb4e27ee1ba612c1"} Sep 30 10:16:21 crc kubenswrapper[4730]: I0930 10:16:21.354331 4730 scope.go:117] "RemoveContainer" containerID="fb1693baff86423933117205eb4ddd3fb4c95696cd57ff870d3b2179798c4193" Sep 30 10:16:21 crc kubenswrapper[4730]: I0930 10:16:21.379301 4730 scope.go:117] "RemoveContainer" containerID="b369be31d719c6ab268dc519c85726e61f2d1a1f8e2769006f34e26002ae7c98" Sep 30 10:16:21 crc kubenswrapper[4730]: I0930 10:16:21.472083 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-bjwh2" event={"ID":"27e64b2d-13ec-4303-9468-86b81dd4a2d0","Type":"ContainerStarted","Data":"f83434abfe73661a9c41e0686aa93032d0e514b8a5753e3b44b7129b315fc57e"} Sep 30 10:16:21 crc kubenswrapper[4730]: I0930 10:16:21.499298 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-bjwh2" podStartSLOduration=2.030711547 podStartE2EDuration="2.499264881s" podCreationTimestamp="2025-09-30 10:16:19 +0000 UTC" firstStartedPulling="2025-09-30 10:16:20.412250959 +0000 UTC m=+1624.745510962" lastFinishedPulling="2025-09-30 10:16:20.880804303 +0000 UTC m=+1625.214064296" observedRunningTime="2025-09-30 10:16:21.484727569 +0000 UTC m=+1625.817987562" 
watchObservedRunningTime="2025-09-30 10:16:21.499264881 +0000 UTC m=+1625.832524924" Sep 30 10:16:26 crc kubenswrapper[4730]: I0930 10:16:26.060007 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-create-2gnxz"] Sep 30 10:16:26 crc kubenswrapper[4730]: I0930 10:16:26.075387 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-create-6spxn"] Sep 30 10:16:26 crc kubenswrapper[4730]: I0930 10:16:26.085365 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-create-2gnxz"] Sep 30 10:16:26 crc kubenswrapper[4730]: I0930 10:16:26.095213 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-create-6spxn"] Sep 30 10:16:26 crc kubenswrapper[4730]: I0930 10:16:26.398093 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="290bc85b-b1ab-4ddf-8e0f-a0b04df356bb" path="/var/lib/kubelet/pods/290bc85b-b1ab-4ddf-8e0f-a0b04df356bb/volumes" Sep 30 10:16:26 crc kubenswrapper[4730]: I0930 10:16:26.399284 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88317b4-c432-4d17-bc96-350867eadc61" path="/var/lib/kubelet/pods/f88317b4-c432-4d17-bc96-350867eadc61/volumes" Sep 30 10:16:26 crc kubenswrapper[4730]: I0930 10:16:26.518911 4730 generic.go:334] "Generic (PLEG): container finished" podID="27e64b2d-13ec-4303-9468-86b81dd4a2d0" containerID="f83434abfe73661a9c41e0686aa93032d0e514b8a5753e3b44b7129b315fc57e" exitCode=0 Sep 30 10:16:26 crc kubenswrapper[4730]: I0930 10:16:26.518956 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-bjwh2" event={"ID":"27e64b2d-13ec-4303-9468-86b81dd4a2d0","Type":"ContainerDied","Data":"f83434abfe73661a9c41e0686aa93032d0e514b8a5753e3b44b7129b315fc57e"} Sep 30 10:16:27 crc kubenswrapper[4730]: I0930 10:16:27.919036 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-bjwh2" Sep 30 10:16:27 crc kubenswrapper[4730]: I0930 10:16:27.987910 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xt4ck\" (UniqueName: \"kubernetes.io/projected/27e64b2d-13ec-4303-9468-86b81dd4a2d0-kube-api-access-xt4ck\") pod \"27e64b2d-13ec-4303-9468-86b81dd4a2d0\" (UID: \"27e64b2d-13ec-4303-9468-86b81dd4a2d0\") " Sep 30 10:16:27 crc kubenswrapper[4730]: I0930 10:16:27.988092 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/27e64b2d-13ec-4303-9468-86b81dd4a2d0-inventory\") pod \"27e64b2d-13ec-4303-9468-86b81dd4a2d0\" (UID: \"27e64b2d-13ec-4303-9468-86b81dd4a2d0\") " Sep 30 10:16:27 crc kubenswrapper[4730]: I0930 10:16:27.988168 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/27e64b2d-13ec-4303-9468-86b81dd4a2d0-ssh-key\") pod \"27e64b2d-13ec-4303-9468-86b81dd4a2d0\" (UID: \"27e64b2d-13ec-4303-9468-86b81dd4a2d0\") " Sep 30 10:16:27 crc kubenswrapper[4730]: I0930 10:16:27.993277 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/27e64b2d-13ec-4303-9468-86b81dd4a2d0-kube-api-access-xt4ck" (OuterVolumeSpecName: "kube-api-access-xt4ck") pod "27e64b2d-13ec-4303-9468-86b81dd4a2d0" (UID: "27e64b2d-13ec-4303-9468-86b81dd4a2d0"). InnerVolumeSpecName "kube-api-access-xt4ck". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:16:28 crc kubenswrapper[4730]: I0930 10:16:28.019833 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/27e64b2d-13ec-4303-9468-86b81dd4a2d0-inventory" (OuterVolumeSpecName: "inventory") pod "27e64b2d-13ec-4303-9468-86b81dd4a2d0" (UID: "27e64b2d-13ec-4303-9468-86b81dd4a2d0"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:16:28 crc kubenswrapper[4730]: I0930 10:16:28.022297 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/27e64b2d-13ec-4303-9468-86b81dd4a2d0-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "27e64b2d-13ec-4303-9468-86b81dd4a2d0" (UID: "27e64b2d-13ec-4303-9468-86b81dd4a2d0"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:16:28 crc kubenswrapper[4730]: I0930 10:16:28.047268 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/watcher-db-create-87fm4"] Sep 30 10:16:28 crc kubenswrapper[4730]: I0930 10:16:28.057972 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/watcher-db-create-87fm4"] Sep 30 10:16:28 crc kubenswrapper[4730]: I0930 10:16:28.089941 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xt4ck\" (UniqueName: \"kubernetes.io/projected/27e64b2d-13ec-4303-9468-86b81dd4a2d0-kube-api-access-xt4ck\") on node \"crc\" DevicePath \"\"" Sep 30 10:16:28 crc kubenswrapper[4730]: I0930 10:16:28.089981 4730 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/27e64b2d-13ec-4303-9468-86b81dd4a2d0-inventory\") on node \"crc\" DevicePath \"\"" Sep 30 10:16:28 crc kubenswrapper[4730]: I0930 10:16:28.089993 4730 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/27e64b2d-13ec-4303-9468-86b81dd4a2d0-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 10:16:28 crc kubenswrapper[4730]: I0930 10:16:28.394468 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fa1eee2f-4cdc-408e-b5ec-e142f7a33250" path="/var/lib/kubelet/pods/fa1eee2f-4cdc-408e-b5ec-e142f7a33250/volumes" Sep 30 10:16:28 crc kubenswrapper[4730]: I0930 10:16:28.551179 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-bjwh2" event={"ID":"27e64b2d-13ec-4303-9468-86b81dd4a2d0","Type":"ContainerDied","Data":"cdedd6b0cc7fc44a31bd0b2eca59753d207903c792a9ddbcbb4e27ee1ba612c1"} Sep 30 10:16:28 crc kubenswrapper[4730]: I0930 10:16:28.551222 4730 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cdedd6b0cc7fc44a31bd0b2eca59753d207903c792a9ddbcbb4e27ee1ba612c1" Sep 30 10:16:28 crc kubenswrapper[4730]: I0930 10:16:28.551283 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-bjwh2" Sep 30 10:16:28 crc kubenswrapper[4730]: I0930 10:16:28.614872 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-9ws2x"] Sep 30 10:16:28 crc kubenswrapper[4730]: E0930 10:16:28.615541 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="27e64b2d-13ec-4303-9468-86b81dd4a2d0" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Sep 30 10:16:28 crc kubenswrapper[4730]: I0930 10:16:28.615578 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="27e64b2d-13ec-4303-9468-86b81dd4a2d0" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Sep 30 10:16:28 crc kubenswrapper[4730]: I0930 10:16:28.615961 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="27e64b2d-13ec-4303-9468-86b81dd4a2d0" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Sep 30 10:16:28 crc kubenswrapper[4730]: I0930 10:16:28.616924 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-9ws2x" Sep 30 10:16:28 crc kubenswrapper[4730]: I0930 10:16:28.624188 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-9ws2x"] Sep 30 10:16:28 crc kubenswrapper[4730]: I0930 10:16:28.647223 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 10:16:28 crc kubenswrapper[4730]: I0930 10:16:28.647279 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 30 10:16:28 crc kubenswrapper[4730]: I0930 10:16:28.647588 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-8vdlt" Sep 30 10:16:28 crc kubenswrapper[4730]: I0930 10:16:28.676001 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 30 10:16:28 crc kubenswrapper[4730]: I0930 10:16:28.705106 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7378cfdc-fe7f-4956-a2eb-3e49898ed1ee-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-9ws2x\" (UID: \"7378cfdc-fe7f-4956-a2eb-3e49898ed1ee\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-9ws2x" Sep 30 10:16:28 crc kubenswrapper[4730]: I0930 10:16:28.705176 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6qzk5\" (UniqueName: \"kubernetes.io/projected/7378cfdc-fe7f-4956-a2eb-3e49898ed1ee-kube-api-access-6qzk5\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-9ws2x\" (UID: \"7378cfdc-fe7f-4956-a2eb-3e49898ed1ee\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-9ws2x" Sep 30 10:16:28 crc kubenswrapper[4730]: I0930 10:16:28.705267 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7378cfdc-fe7f-4956-a2eb-3e49898ed1ee-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-9ws2x\" (UID: \"7378cfdc-fe7f-4956-a2eb-3e49898ed1ee\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-9ws2x" Sep 30 10:16:28 crc kubenswrapper[4730]: I0930 10:16:28.807202 4730 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7378cfdc-fe7f-4956-a2eb-3e49898ed1ee-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-9ws2x\" (UID: \"7378cfdc-fe7f-4956-a2eb-3e49898ed1ee\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-9ws2x" Sep 30 10:16:28 crc kubenswrapper[4730]: I0930 10:16:28.807260 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6qzk5\" (UniqueName: \"kubernetes.io/projected/7378cfdc-fe7f-4956-a2eb-3e49898ed1ee-kube-api-access-6qzk5\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-9ws2x\" (UID: \"7378cfdc-fe7f-4956-a2eb-3e49898ed1ee\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-9ws2x" Sep 30 10:16:28 crc kubenswrapper[4730]: I0930 10:16:28.807338 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7378cfdc-fe7f-4956-a2eb-3e49898ed1ee-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-9ws2x\" (UID: \"7378cfdc-fe7f-4956-a2eb-3e49898ed1ee\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-9ws2x" Sep 30 10:16:28 crc kubenswrapper[4730]: I0930 10:16:28.811933 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7378cfdc-fe7f-4956-a2eb-3e49898ed1ee-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-9ws2x\" (UID: \"7378cfdc-fe7f-4956-a2eb-3e49898ed1ee\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-9ws2x" Sep 30 10:16:28 crc kubenswrapper[4730]: I0930 10:16:28.812188 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7378cfdc-fe7f-4956-a2eb-3e49898ed1ee-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-9ws2x\" (UID: \"7378cfdc-fe7f-4956-a2eb-3e49898ed1ee\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-9ws2x" Sep 30 10:16:28 crc kubenswrapper[4730]: I0930 10:16:28.823829 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6qzk5\" (UniqueName: \"kubernetes.io/projected/7378cfdc-fe7f-4956-a2eb-3e49898ed1ee-kube-api-access-6qzk5\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-9ws2x\" (UID: \"7378cfdc-fe7f-4956-a2eb-3e49898ed1ee\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-9ws2x" Sep 30 10:16:28 crc kubenswrapper[4730]: I0930 10:16:28.977104 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-9ws2x" Sep 30 10:16:29 crc kubenswrapper[4730]: I0930 10:16:29.484726 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-9ws2x"] Sep 30 10:16:29 crc kubenswrapper[4730]: I0930 10:16:29.560743 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-9ws2x" event={"ID":"7378cfdc-fe7f-4956-a2eb-3e49898ed1ee","Type":"ContainerStarted","Data":"fb8305a6a04acc8d54248acc0de191dcc459e0e75cd17cab2f872ba57b9d82dc"} Sep 30 10:16:30 crc kubenswrapper[4730]: I0930 10:16:30.580482 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-9ws2x" event={"ID":"7378cfdc-fe7f-4956-a2eb-3e49898ed1ee","Type":"ContainerStarted","Data":"ed706224189e623dcca846ac44bd976a99861004af3c3ec6cc610492e737eb33"} Sep 30 10:16:30 crc kubenswrapper[4730]: I0930 10:16:30.607538 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-9ws2x" podStartSLOduration=2.145049916 podStartE2EDuration="2.607517554s" podCreationTimestamp="2025-09-30 10:16:28 +0000 UTC" firstStartedPulling="2025-09-30 10:16:29.488531197 +0000 UTC m=+1633.821791190" lastFinishedPulling="2025-09-30 10:16:29.950998815 +0000 UTC m=+1634.284258828" observedRunningTime="2025-09-30 10:16:30.601791388 +0000 UTC m=+1634.935051381" watchObservedRunningTime="2025-09-30 10:16:30.607517554 +0000 UTC m=+1634.940777567" Sep 30 10:16:31 crc kubenswrapper[4730]: I0930 10:16:31.033267 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-create-85hvz"] Sep 30 10:16:31 crc kubenswrapper[4730]: I0930 10:16:31.047282 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-create-85hvz"] Sep 30 10:16:32 crc kubenswrapper[4730]: I0930 10:16:32.396026 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a955da24-88da-474d-9370-f146730abd1a" path="/var/lib/kubelet/pods/a955da24-88da-474d-9370-f146730abd1a/volumes" Sep 30 10:16:34 crc kubenswrapper[4730]: I0930 10:16:34.381074 4730 scope.go:117] "RemoveContainer" containerID="62dd4ca15c6a394c053301d28de02310227d72673a1c1548f40c218bf8c90c44" Sep 30 10:16:34 crc kubenswrapper[4730]: E0930 10:16:34.381496 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 10:16:39 crc kubenswrapper[4730]: I0930 10:16:39.036927 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-2c9c-account-create-9hs8k"] Sep 30 10:16:39 crc kubenswrapper[4730]: I0930 10:16:39.045192 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/watcher-f3dc-account-create-ncmmt"] Sep 30 10:16:39 crc kubenswrapper[4730]: I0930 10:16:39.055372 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-9b71-account-create-5mbkm"] Sep 30 10:16:39 crc kubenswrapper[4730]: I0930 10:16:39.067516 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/watcher-f3dc-account-create-ncmmt"] Sep 30 10:16:39 crc 
kubenswrapper[4730]: I0930 10:16:39.077802 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-9b71-account-create-5mbkm"] Sep 30 10:16:39 crc kubenswrapper[4730]: I0930 10:16:39.087152 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-2c9c-account-create-9hs8k"] Sep 30 10:16:40 crc kubenswrapper[4730]: I0930 10:16:40.398585 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="801c2fbd-0d18-4528-9baf-8ace34906cfa" path="/var/lib/kubelet/pods/801c2fbd-0d18-4528-9baf-8ace34906cfa/volumes" Sep 30 10:16:40 crc kubenswrapper[4730]: I0930 10:16:40.399466 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="835855e8-ece2-4510-848b-f2851ba7aaa4" path="/var/lib/kubelet/pods/835855e8-ece2-4510-848b-f2851ba7aaa4/volumes" Sep 30 10:16:40 crc kubenswrapper[4730]: I0930 10:16:40.400269 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c38c27b7-08c7-4d1e-b151-3b092180bfc9" path="/var/lib/kubelet/pods/c38c27b7-08c7-4d1e-b151-3b092180bfc9/volumes" Sep 30 10:16:42 crc kubenswrapper[4730]: I0930 10:16:42.024861 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-5c12-account-create-w4fmn"] Sep 30 10:16:42 crc kubenswrapper[4730]: I0930 10:16:42.033991 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-5c12-account-create-w4fmn"] Sep 30 10:16:42 crc kubenswrapper[4730]: I0930 10:16:42.400822 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1de6415b-c8dc-47eb-9ec3-aad2fa6ecc0a" path="/var/lib/kubelet/pods/1de6415b-c8dc-47eb-9ec3-aad2fa6ecc0a/volumes" Sep 30 10:16:48 crc kubenswrapper[4730]: I0930 10:16:48.381094 4730 scope.go:117] "RemoveContainer" containerID="62dd4ca15c6a394c053301d28de02310227d72673a1c1548f40c218bf8c90c44" Sep 30 10:16:48 crc kubenswrapper[4730]: E0930 10:16:48.381758 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 10:16:59 crc kubenswrapper[4730]: I0930 10:16:59.381135 4730 scope.go:117] "RemoveContainer" containerID="62dd4ca15c6a394c053301d28de02310227d72673a1c1548f40c218bf8c90c44" Sep 30 10:16:59 crc kubenswrapper[4730]: E0930 10:16:59.381907 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 10:17:03 crc kubenswrapper[4730]: I0930 10:17:03.030936 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-create-9ghfp"] Sep 30 10:17:03 crc kubenswrapper[4730]: I0930 10:17:03.043136 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-create-g2h6p"] Sep 30 10:17:03 crc kubenswrapper[4730]: I0930 10:17:03.051402 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-create-g2h6p"] Sep 30 10:17:03 crc kubenswrapper[4730]: I0930 10:17:03.059424 4730 
kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-create-9ghfp"] Sep 30 10:17:04 crc kubenswrapper[4730]: I0930 10:17:04.397505 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="35caf426-5c87-4f91-ad73-00a113363a23" path="/var/lib/kubelet/pods/35caf426-5c87-4f91-ad73-00a113363a23/volumes" Sep 30 10:17:04 crc kubenswrapper[4730]: I0930 10:17:04.398128 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8338a732-bf5e-4ce3-bf9b-2c00d8bb11e4" path="/var/lib/kubelet/pods/8338a732-bf5e-4ce3-bf9b-2c00d8bb11e4/volumes" Sep 30 10:17:06 crc kubenswrapper[4730]: I0930 10:17:06.039726 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-create-zrxjm"] Sep 30 10:17:06 crc kubenswrapper[4730]: I0930 10:17:06.048042 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-create-zrxjm"] Sep 30 10:17:06 crc kubenswrapper[4730]: I0930 10:17:06.391169 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9e670747-fb83-4ee4-a83f-c6d2d06f213e" path="/var/lib/kubelet/pods/9e670747-fb83-4ee4-a83f-c6d2d06f213e/volumes" Sep 30 10:17:06 crc kubenswrapper[4730]: I0930 10:17:06.916819 4730 generic.go:334] "Generic (PLEG): container finished" podID="7378cfdc-fe7f-4956-a2eb-3e49898ed1ee" containerID="ed706224189e623dcca846ac44bd976a99861004af3c3ec6cc610492e737eb33" exitCode=0 Sep 30 10:17:06 crc kubenswrapper[4730]: I0930 10:17:06.916864 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-9ws2x" event={"ID":"7378cfdc-fe7f-4956-a2eb-3e49898ed1ee","Type":"ContainerDied","Data":"ed706224189e623dcca846ac44bd976a99861004af3c3ec6cc610492e737eb33"} Sep 30 10:17:07 crc kubenswrapper[4730]: I0930 10:17:07.035099 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-sync-lqkp9"] Sep 30 10:17:07 crc kubenswrapper[4730]: I0930 10:17:07.047199 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-sync-lqkp9"] Sep 30 10:17:08 crc kubenswrapper[4730]: I0930 10:17:08.310880 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-9ws2x" Sep 30 10:17:08 crc kubenswrapper[4730]: I0930 10:17:08.334446 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7378cfdc-fe7f-4956-a2eb-3e49898ed1ee-inventory\") pod \"7378cfdc-fe7f-4956-a2eb-3e49898ed1ee\" (UID: \"7378cfdc-fe7f-4956-a2eb-3e49898ed1ee\") " Sep 30 10:17:08 crc kubenswrapper[4730]: I0930 10:17:08.334777 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7378cfdc-fe7f-4956-a2eb-3e49898ed1ee-ssh-key\") pod \"7378cfdc-fe7f-4956-a2eb-3e49898ed1ee\" (UID: \"7378cfdc-fe7f-4956-a2eb-3e49898ed1ee\") " Sep 30 10:17:08 crc kubenswrapper[4730]: I0930 10:17:08.334816 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6qzk5\" (UniqueName: \"kubernetes.io/projected/7378cfdc-fe7f-4956-a2eb-3e49898ed1ee-kube-api-access-6qzk5\") pod \"7378cfdc-fe7f-4956-a2eb-3e49898ed1ee\" (UID: \"7378cfdc-fe7f-4956-a2eb-3e49898ed1ee\") " Sep 30 10:17:08 crc kubenswrapper[4730]: I0930 10:17:08.341856 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7378cfdc-fe7f-4956-a2eb-3e49898ed1ee-kube-api-access-6qzk5" (OuterVolumeSpecName: "kube-api-access-6qzk5") pod "7378cfdc-fe7f-4956-a2eb-3e49898ed1ee" (UID: "7378cfdc-fe7f-4956-a2eb-3e49898ed1ee"). InnerVolumeSpecName "kube-api-access-6qzk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:17:08 crc kubenswrapper[4730]: I0930 10:17:08.367271 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7378cfdc-fe7f-4956-a2eb-3e49898ed1ee-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "7378cfdc-fe7f-4956-a2eb-3e49898ed1ee" (UID: "7378cfdc-fe7f-4956-a2eb-3e49898ed1ee"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:17:08 crc kubenswrapper[4730]: I0930 10:17:08.370004 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7378cfdc-fe7f-4956-a2eb-3e49898ed1ee-inventory" (OuterVolumeSpecName: "inventory") pod "7378cfdc-fe7f-4956-a2eb-3e49898ed1ee" (UID: "7378cfdc-fe7f-4956-a2eb-3e49898ed1ee"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:17:08 crc kubenswrapper[4730]: I0930 10:17:08.394535 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0dc68fe6-3a0f-4069-999a-65cf283c50e2" path="/var/lib/kubelet/pods/0dc68fe6-3a0f-4069-999a-65cf283c50e2/volumes" Sep 30 10:17:08 crc kubenswrapper[4730]: I0930 10:17:08.437144 4730 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7378cfdc-fe7f-4956-a2eb-3e49898ed1ee-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 10:17:08 crc kubenswrapper[4730]: I0930 10:17:08.437180 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6qzk5\" (UniqueName: \"kubernetes.io/projected/7378cfdc-fe7f-4956-a2eb-3e49898ed1ee-kube-api-access-6qzk5\") on node \"crc\" DevicePath \"\"" Sep 30 10:17:08 crc kubenswrapper[4730]: I0930 10:17:08.437191 4730 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7378cfdc-fe7f-4956-a2eb-3e49898ed1ee-inventory\") on node \"crc\" DevicePath \"\"" Sep 30 10:17:08 crc kubenswrapper[4730]: I0930 10:17:08.958140 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-9ws2x" event={"ID":"7378cfdc-fe7f-4956-a2eb-3e49898ed1ee","Type":"ContainerDied","Data":"fb8305a6a04acc8d54248acc0de191dcc459e0e75cd17cab2f872ba57b9d82dc"} Sep 30 10:17:08 crc kubenswrapper[4730]: I0930 10:17:08.958189 4730 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fb8305a6a04acc8d54248acc0de191dcc459e0e75cd17cab2f872ba57b9d82dc" Sep 30 10:17:08 crc kubenswrapper[4730]: I0930 10:17:08.958280 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-9ws2x" Sep 30 10:17:09 crc kubenswrapper[4730]: I0930 10:17:09.017766 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-nqdpx"] Sep 30 10:17:09 crc kubenswrapper[4730]: E0930 10:17:09.018363 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7378cfdc-fe7f-4956-a2eb-3e49898ed1ee" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Sep 30 10:17:09 crc kubenswrapper[4730]: I0930 10:17:09.018399 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="7378cfdc-fe7f-4956-a2eb-3e49898ed1ee" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Sep 30 10:17:09 crc kubenswrapper[4730]: I0930 10:17:09.018764 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="7378cfdc-fe7f-4956-a2eb-3e49898ed1ee" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Sep 30 10:17:09 crc kubenswrapper[4730]: I0930 10:17:09.019886 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-nqdpx" Sep 30 10:17:09 crc kubenswrapper[4730]: I0930 10:17:09.022223 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-8vdlt" Sep 30 10:17:09 crc kubenswrapper[4730]: I0930 10:17:09.022507 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 30 10:17:09 crc kubenswrapper[4730]: I0930 10:17:09.022685 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 10:17:09 crc kubenswrapper[4730]: I0930 10:17:09.025979 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 30 10:17:09 crc kubenswrapper[4730]: I0930 10:17:09.030684 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-nqdpx"] Sep 30 10:17:09 crc kubenswrapper[4730]: I0930 10:17:09.059072 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b5psv\" (UniqueName: \"kubernetes.io/projected/76058092-f481-4f4d-bf88-18610aadb37c-kube-api-access-b5psv\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-nqdpx\" (UID: \"76058092-f481-4f4d-bf88-18610aadb37c\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-nqdpx" Sep 30 10:17:09 crc kubenswrapper[4730]: I0930 10:17:09.059370 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/76058092-f481-4f4d-bf88-18610aadb37c-inventory\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-nqdpx\" (UID: \"76058092-f481-4f4d-bf88-18610aadb37c\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-nqdpx" Sep 30 10:17:09 crc kubenswrapper[4730]: I0930 10:17:09.059534 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/76058092-f481-4f4d-bf88-18610aadb37c-ssh-key\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-nqdpx\" (UID: \"76058092-f481-4f4d-bf88-18610aadb37c\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-nqdpx" Sep 30 10:17:09 crc kubenswrapper[4730]: I0930 10:17:09.161586 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/76058092-f481-4f4d-bf88-18610aadb37c-inventory\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-nqdpx\" (UID: \"76058092-f481-4f4d-bf88-18610aadb37c\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-nqdpx" Sep 30 10:17:09 crc kubenswrapper[4730]: I0930 10:17:09.161755 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/76058092-f481-4f4d-bf88-18610aadb37c-ssh-key\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-nqdpx\" (UID: \"76058092-f481-4f4d-bf88-18610aadb37c\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-nqdpx" Sep 30 10:17:09 crc kubenswrapper[4730]: I0930 10:17:09.161881 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b5psv\" (UniqueName: \"kubernetes.io/projected/76058092-f481-4f4d-bf88-18610aadb37c-kube-api-access-b5psv\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-nqdpx\" 
(UID: \"76058092-f481-4f4d-bf88-18610aadb37c\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-nqdpx" Sep 30 10:17:09 crc kubenswrapper[4730]: I0930 10:17:09.166034 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/76058092-f481-4f4d-bf88-18610aadb37c-ssh-key\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-nqdpx\" (UID: \"76058092-f481-4f4d-bf88-18610aadb37c\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-nqdpx" Sep 30 10:17:09 crc kubenswrapper[4730]: I0930 10:17:09.166042 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/76058092-f481-4f4d-bf88-18610aadb37c-inventory\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-nqdpx\" (UID: \"76058092-f481-4f4d-bf88-18610aadb37c\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-nqdpx" Sep 30 10:17:09 crc kubenswrapper[4730]: I0930 10:17:09.190410 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b5psv\" (UniqueName: \"kubernetes.io/projected/76058092-f481-4f4d-bf88-18610aadb37c-kube-api-access-b5psv\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-nqdpx\" (UID: \"76058092-f481-4f4d-bf88-18610aadb37c\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-nqdpx" Sep 30 10:17:09 crc kubenswrapper[4730]: I0930 10:17:09.353041 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-nqdpx" Sep 30 10:17:09 crc kubenswrapper[4730]: I0930 10:17:09.845369 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-nqdpx"] Sep 30 10:17:09 crc kubenswrapper[4730]: I0930 10:17:09.968575 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-nqdpx" event={"ID":"76058092-f481-4f4d-bf88-18610aadb37c","Type":"ContainerStarted","Data":"1bda440ee535ee41e0f6eb2edb44bdfc7031933b0077a11363fb7809c4021721"} Sep 30 10:17:10 crc kubenswrapper[4730]: I0930 10:17:10.382229 4730 scope.go:117] "RemoveContainer" containerID="62dd4ca15c6a394c053301d28de02310227d72673a1c1548f40c218bf8c90c44" Sep 30 10:17:10 crc kubenswrapper[4730]: E0930 10:17:10.382552 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 10:17:10 crc kubenswrapper[4730]: I0930 10:17:10.980158 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-nqdpx" event={"ID":"76058092-f481-4f4d-bf88-18610aadb37c","Type":"ContainerStarted","Data":"31d1cc5268f922bb3dc96527d76a66c3fee7c866b6b2b49be2eeefe1682348b2"} Sep 30 10:17:10 crc kubenswrapper[4730]: I0930 10:17:10.998416 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-nqdpx" podStartSLOduration=2.519587415 podStartE2EDuration="2.998397599s" podCreationTimestamp="2025-09-30 10:17:08 +0000 UTC" firstStartedPulling="2025-09-30 10:17:09.849940691 +0000 UTC 
m=+1674.183200684" lastFinishedPulling="2025-09-30 10:17:10.328750875 +0000 UTC m=+1674.662010868" observedRunningTime="2025-09-30 10:17:10.99294663 +0000 UTC m=+1675.326206633" watchObservedRunningTime="2025-09-30 10:17:10.998397599 +0000 UTC m=+1675.331657592" Sep 30 10:17:12 crc kubenswrapper[4730]: I0930 10:17:12.035739 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-sync-b9drt"] Sep 30 10:17:12 crc kubenswrapper[4730]: I0930 10:17:12.052869 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-sync-b9drt"] Sep 30 10:17:12 crc kubenswrapper[4730]: I0930 10:17:12.392480 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="914446c5-8bb0-4025-aa12-1ddd46cda6d0" path="/var/lib/kubelet/pods/914446c5-8bb0-4025-aa12-1ddd46cda6d0/volumes" Sep 30 10:17:15 crc kubenswrapper[4730]: I0930 10:17:15.037188 4730 generic.go:334] "Generic (PLEG): container finished" podID="76058092-f481-4f4d-bf88-18610aadb37c" containerID="31d1cc5268f922bb3dc96527d76a66c3fee7c866b6b2b49be2eeefe1682348b2" exitCode=0 Sep 30 10:17:15 crc kubenswrapper[4730]: I0930 10:17:15.037245 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-nqdpx" event={"ID":"76058092-f481-4f4d-bf88-18610aadb37c","Type":"ContainerDied","Data":"31d1cc5268f922bb3dc96527d76a66c3fee7c866b6b2b49be2eeefe1682348b2"} Sep 30 10:17:16 crc kubenswrapper[4730]: I0930 10:17:16.436464 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-nqdpx" Sep 30 10:17:16 crc kubenswrapper[4730]: I0930 10:17:16.506076 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/76058092-f481-4f4d-bf88-18610aadb37c-inventory\") pod \"76058092-f481-4f4d-bf88-18610aadb37c\" (UID: \"76058092-f481-4f4d-bf88-18610aadb37c\") " Sep 30 10:17:16 crc kubenswrapper[4730]: I0930 10:17:16.506184 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/76058092-f481-4f4d-bf88-18610aadb37c-ssh-key\") pod \"76058092-f481-4f4d-bf88-18610aadb37c\" (UID: \"76058092-f481-4f4d-bf88-18610aadb37c\") " Sep 30 10:17:16 crc kubenswrapper[4730]: I0930 10:17:16.506371 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b5psv\" (UniqueName: \"kubernetes.io/projected/76058092-f481-4f4d-bf88-18610aadb37c-kube-api-access-b5psv\") pod \"76058092-f481-4f4d-bf88-18610aadb37c\" (UID: \"76058092-f481-4f4d-bf88-18610aadb37c\") " Sep 30 10:17:16 crc kubenswrapper[4730]: I0930 10:17:16.511600 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/76058092-f481-4f4d-bf88-18610aadb37c-kube-api-access-b5psv" (OuterVolumeSpecName: "kube-api-access-b5psv") pod "76058092-f481-4f4d-bf88-18610aadb37c" (UID: "76058092-f481-4f4d-bf88-18610aadb37c"). InnerVolumeSpecName "kube-api-access-b5psv". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:17:16 crc kubenswrapper[4730]: I0930 10:17:16.538373 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/76058092-f481-4f4d-bf88-18610aadb37c-inventory" (OuterVolumeSpecName: "inventory") pod "76058092-f481-4f4d-bf88-18610aadb37c" (UID: "76058092-f481-4f4d-bf88-18610aadb37c"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:17:16 crc kubenswrapper[4730]: I0930 10:17:16.549334 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/76058092-f481-4f4d-bf88-18610aadb37c-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "76058092-f481-4f4d-bf88-18610aadb37c" (UID: "76058092-f481-4f4d-bf88-18610aadb37c"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:17:16 crc kubenswrapper[4730]: I0930 10:17:16.608637 4730 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/76058092-f481-4f4d-bf88-18610aadb37c-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 10:17:16 crc kubenswrapper[4730]: I0930 10:17:16.608672 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b5psv\" (UniqueName: \"kubernetes.io/projected/76058092-f481-4f4d-bf88-18610aadb37c-kube-api-access-b5psv\") on node \"crc\" DevicePath \"\"" Sep 30 10:17:16 crc kubenswrapper[4730]: I0930 10:17:16.608684 4730 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/76058092-f481-4f4d-bf88-18610aadb37c-inventory\") on node \"crc\" DevicePath \"\"" Sep 30 10:17:17 crc kubenswrapper[4730]: I0930 10:17:17.036214 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-8780-account-create-xzbgl"] Sep 30 10:17:17 crc kubenswrapper[4730]: I0930 10:17:17.043389 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-8780-account-create-xzbgl"] Sep 30 10:17:17 crc kubenswrapper[4730]: I0930 10:17:17.057220 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-nqdpx" event={"ID":"76058092-f481-4f4d-bf88-18610aadb37c","Type":"ContainerDied","Data":"1bda440ee535ee41e0f6eb2edb44bdfc7031933b0077a11363fb7809c4021721"} Sep 30 10:17:17 crc kubenswrapper[4730]: I0930 10:17:17.057285 4730 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1bda440ee535ee41e0f6eb2edb44bdfc7031933b0077a11363fb7809c4021721" Sep 30 10:17:17 crc kubenswrapper[4730]: I0930 10:17:17.057291 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-nqdpx" Sep 30 10:17:17 crc kubenswrapper[4730]: I0930 10:17:17.139839 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-p2hb9"] Sep 30 10:17:17 crc kubenswrapper[4730]: E0930 10:17:17.140843 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="76058092-f481-4f4d-bf88-18610aadb37c" containerName="ceph-hci-pre-edpm-deployment-openstack-edpm-ipam" Sep 30 10:17:17 crc kubenswrapper[4730]: I0930 10:17:17.140870 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="76058092-f481-4f4d-bf88-18610aadb37c" containerName="ceph-hci-pre-edpm-deployment-openstack-edpm-ipam" Sep 30 10:17:17 crc kubenswrapper[4730]: I0930 10:17:17.141141 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="76058092-f481-4f4d-bf88-18610aadb37c" containerName="ceph-hci-pre-edpm-deployment-openstack-edpm-ipam" Sep 30 10:17:17 crc kubenswrapper[4730]: I0930 10:17:17.144382 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-p2hb9" Sep 30 10:17:17 crc kubenswrapper[4730]: I0930 10:17:17.146547 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 10:17:17 crc kubenswrapper[4730]: I0930 10:17:17.146970 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 30 10:17:17 crc kubenswrapper[4730]: I0930 10:17:17.147222 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 30 10:17:17 crc kubenswrapper[4730]: I0930 10:17:17.147287 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-8vdlt" Sep 30 10:17:17 crc kubenswrapper[4730]: I0930 10:17:17.155705 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-p2hb9"] Sep 30 10:17:17 crc kubenswrapper[4730]: I0930 10:17:17.221434 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8w9jj\" (UniqueName: \"kubernetes.io/projected/39a3cdfe-e568-41af-9a8b-88525dc448de-kube-api-access-8w9jj\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-p2hb9\" (UID: \"39a3cdfe-e568-41af-9a8b-88525dc448de\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-p2hb9" Sep 30 10:17:17 crc kubenswrapper[4730]: I0930 10:17:17.221515 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/39a3cdfe-e568-41af-9a8b-88525dc448de-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-p2hb9\" (UID: \"39a3cdfe-e568-41af-9a8b-88525dc448de\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-p2hb9" Sep 30 10:17:17 crc kubenswrapper[4730]: I0930 10:17:17.221550 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/39a3cdfe-e568-41af-9a8b-88525dc448de-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-p2hb9\" (UID: \"39a3cdfe-e568-41af-9a8b-88525dc448de\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-p2hb9" Sep 30 10:17:17 crc kubenswrapper[4730]: I0930 10:17:17.323390 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/39a3cdfe-e568-41af-9a8b-88525dc448de-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-p2hb9\" (UID: \"39a3cdfe-e568-41af-9a8b-88525dc448de\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-p2hb9" Sep 30 10:17:17 crc kubenswrapper[4730]: I0930 10:17:17.323947 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/39a3cdfe-e568-41af-9a8b-88525dc448de-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-p2hb9\" (UID: \"39a3cdfe-e568-41af-9a8b-88525dc448de\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-p2hb9" Sep 30 10:17:17 crc kubenswrapper[4730]: I0930 10:17:17.324127 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8w9jj\" (UniqueName: \"kubernetes.io/projected/39a3cdfe-e568-41af-9a8b-88525dc448de-kube-api-access-8w9jj\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-p2hb9\" 
(UID: \"39a3cdfe-e568-41af-9a8b-88525dc448de\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-p2hb9" Sep 30 10:17:17 crc kubenswrapper[4730]: I0930 10:17:17.327571 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/39a3cdfe-e568-41af-9a8b-88525dc448de-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-p2hb9\" (UID: \"39a3cdfe-e568-41af-9a8b-88525dc448de\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-p2hb9" Sep 30 10:17:17 crc kubenswrapper[4730]: I0930 10:17:17.328570 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/39a3cdfe-e568-41af-9a8b-88525dc448de-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-p2hb9\" (UID: \"39a3cdfe-e568-41af-9a8b-88525dc448de\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-p2hb9" Sep 30 10:17:17 crc kubenswrapper[4730]: I0930 10:17:17.345361 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8w9jj\" (UniqueName: \"kubernetes.io/projected/39a3cdfe-e568-41af-9a8b-88525dc448de-kube-api-access-8w9jj\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-p2hb9\" (UID: \"39a3cdfe-e568-41af-9a8b-88525dc448de\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-p2hb9" Sep 30 10:17:17 crc kubenswrapper[4730]: I0930 10:17:17.460284 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-p2hb9" Sep 30 10:17:18 crc kubenswrapper[4730]: I0930 10:17:18.023642 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-p2hb9"] Sep 30 10:17:18 crc kubenswrapper[4730]: I0930 10:17:18.067229 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-p2hb9" event={"ID":"39a3cdfe-e568-41af-9a8b-88525dc448de","Type":"ContainerStarted","Data":"652267776a9abd725e102f56275c7a51da6b7836c3cc596de007e8cb29d27fca"} Sep 30 10:17:18 crc kubenswrapper[4730]: I0930 10:17:18.394024 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ce75a3c3-26fd-44b7-82c9-fe9edf0285fb" path="/var/lib/kubelet/pods/ce75a3c3-26fd-44b7-82c9-fe9edf0285fb/volumes" Sep 30 10:17:19 crc kubenswrapper[4730]: I0930 10:17:19.077450 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-p2hb9" event={"ID":"39a3cdfe-e568-41af-9a8b-88525dc448de","Type":"ContainerStarted","Data":"6d4a0ffdefdd28b27e93c3e105638672bb45fab240df9d1c04536e5d96da363b"} Sep 30 10:17:19 crc kubenswrapper[4730]: I0930 10:17:19.095139 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-p2hb9" podStartSLOduration=1.300387113 podStartE2EDuration="2.095120187s" podCreationTimestamp="2025-09-30 10:17:17 +0000 UTC" firstStartedPulling="2025-09-30 10:17:18.029691717 +0000 UTC m=+1682.362951710" lastFinishedPulling="2025-09-30 10:17:18.824424771 +0000 UTC m=+1683.157684784" observedRunningTime="2025-09-30 10:17:19.093077944 +0000 UTC m=+1683.426337937" watchObservedRunningTime="2025-09-30 10:17:19.095120187 +0000 UTC m=+1683.428380190" Sep 30 10:17:21 crc kubenswrapper[4730]: I0930 10:17:21.458218 4730 scope.go:117] "RemoveContainer" 
containerID="d3670ec3a9701dd9ff30e35035e432537d3c7549dec022d7dcfb55ce743c6d5a" Sep 30 10:17:21 crc kubenswrapper[4730]: I0930 10:17:21.490464 4730 scope.go:117] "RemoveContainer" containerID="8109803dfa723493b6d947432a8c0eece2256f69bc8f851b33750e5b32df299e" Sep 30 10:17:21 crc kubenswrapper[4730]: I0930 10:17:21.545024 4730 scope.go:117] "RemoveContainer" containerID="5ab2c7a83e471040613b45bb037b3115907eef1d25dbb0a4a1e57cda4db60eb6" Sep 30 10:17:21 crc kubenswrapper[4730]: I0930 10:17:21.597750 4730 scope.go:117] "RemoveContainer" containerID="3c4f613823f91f03848acdb85599402e22d0c9e82601ce86c67cbc20cfb1abcd" Sep 30 10:17:21 crc kubenswrapper[4730]: I0930 10:17:21.641940 4730 scope.go:117] "RemoveContainer" containerID="7460154e675bd3f2277e724831914427d8b12f95ee175b1ab533adf3ab9dad47" Sep 30 10:17:21 crc kubenswrapper[4730]: I0930 10:17:21.688757 4730 scope.go:117] "RemoveContainer" containerID="05867dce8da04c08197385d735995f18e092ab8c86e8025fabd04400ed987251" Sep 30 10:17:21 crc kubenswrapper[4730]: I0930 10:17:21.730798 4730 scope.go:117] "RemoveContainer" containerID="cc7a7ecf9857678d29345ce498e3b8a2aada250518e6e6732272d075f3903dee" Sep 30 10:17:21 crc kubenswrapper[4730]: I0930 10:17:21.751918 4730 scope.go:117] "RemoveContainer" containerID="eb786361a6739649eda2a6096de792a7ad71d8c4af630060dbe747389c811438" Sep 30 10:17:21 crc kubenswrapper[4730]: I0930 10:17:21.770908 4730 scope.go:117] "RemoveContainer" containerID="ad67f1ae198f1a9e8882d912eaf493f81ccdd573952c684bd5c5e5400478e833" Sep 30 10:17:21 crc kubenswrapper[4730]: I0930 10:17:21.812222 4730 scope.go:117] "RemoveContainer" containerID="779ecda634a65afefa3d576c756fe0a9d9d227fea94c080c59a5426b95bbb0c4" Sep 30 10:17:21 crc kubenswrapper[4730]: I0930 10:17:21.861657 4730 scope.go:117] "RemoveContainer" containerID="a625614874f466d491036e6d2bfd4d978110c68da3dcf20258720f5c49f93acf" Sep 30 10:17:21 crc kubenswrapper[4730]: I0930 10:17:21.886516 4730 scope.go:117] "RemoveContainer" containerID="fe67a7fa98146f0625851ff0f4258a3c3efbc57804d83781b0d5e4e768317831" Sep 30 10:17:21 crc kubenswrapper[4730]: I0930 10:17:21.920870 4730 scope.go:117] "RemoveContainer" containerID="443a624f63f51b54a1283255625c41610ae2a8fd98889be3b262b5eb0d5d3157" Sep 30 10:17:21 crc kubenswrapper[4730]: I0930 10:17:21.948699 4730 scope.go:117] "RemoveContainer" containerID="2c2d89fa4ee358a5640c900e249c0beb20e2d16b9064e459053bc3f9233d4044" Sep 30 10:17:22 crc kubenswrapper[4730]: I0930 10:17:22.033941 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/watcher-db-sync-47ddk"] Sep 30 10:17:22 crc kubenswrapper[4730]: I0930 10:17:22.047788 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/watcher-db-sync-47ddk"] Sep 30 10:17:22 crc kubenswrapper[4730]: I0930 10:17:22.392211 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f947945d-0a37-4104-95b5-d3437cd60556" path="/var/lib/kubelet/pods/f947945d-0a37-4104-95b5-d3437cd60556/volumes" Sep 30 10:17:23 crc kubenswrapper[4730]: I0930 10:17:23.027981 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-18fc-account-create-9j7xr"] Sep 30 10:17:23 crc kubenswrapper[4730]: I0930 10:17:23.037823 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-18fc-account-create-9j7xr"] Sep 30 10:17:24 crc kubenswrapper[4730]: I0930 10:17:24.381423 4730 scope.go:117] "RemoveContainer" containerID="62dd4ca15c6a394c053301d28de02310227d72673a1c1548f40c218bf8c90c44" Sep 30 10:17:24 crc kubenswrapper[4730]: E0930 10:17:24.381742 4730 
pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 10:17:24 crc kubenswrapper[4730]: I0930 10:17:24.403059 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bee9a35-25c0-44b5-8160-a2787eeea901" path="/var/lib/kubelet/pods/4bee9a35-25c0-44b5-8160-a2787eeea901/volumes" Sep 30 10:17:25 crc kubenswrapper[4730]: I0930 10:17:25.044326 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-daf3-account-create-5rdlr"] Sep 30 10:17:25 crc kubenswrapper[4730]: I0930 10:17:25.059022 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-daf3-account-create-5rdlr"] Sep 30 10:17:26 crc kubenswrapper[4730]: I0930 10:17:26.395106 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4ccc082d-d035-4fa0-9a6b-76fa8b89c055" path="/var/lib/kubelet/pods/4ccc082d-d035-4fa0-9a6b-76fa8b89c055/volumes" Sep 30 10:17:30 crc kubenswrapper[4730]: I0930 10:17:30.029821 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-sync-gcmkp"] Sep 30 10:17:30 crc kubenswrapper[4730]: I0930 10:17:30.037190 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-sync-gcmkp"] Sep 30 10:17:30 crc kubenswrapper[4730]: I0930 10:17:30.391134 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="64e70143-f93b-4808-b388-4faaa7f8e51d" path="/var/lib/kubelet/pods/64e70143-f93b-4808-b388-4faaa7f8e51d/volumes" Sep 30 10:17:30 crc kubenswrapper[4730]: I0930 10:17:30.602132 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-n8nx2"] Sep 30 10:17:30 crc kubenswrapper[4730]: I0930 10:17:30.606075 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-n8nx2" Sep 30 10:17:30 crc kubenswrapper[4730]: I0930 10:17:30.613788 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-n8nx2"] Sep 30 10:17:30 crc kubenswrapper[4730]: I0930 10:17:30.773876 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9jjmr\" (UniqueName: \"kubernetes.io/projected/f3575076-9cb3-4c45-859f-87159b323401-kube-api-access-9jjmr\") pod \"redhat-operators-n8nx2\" (UID: \"f3575076-9cb3-4c45-859f-87159b323401\") " pod="openshift-marketplace/redhat-operators-n8nx2" Sep 30 10:17:30 crc kubenswrapper[4730]: I0930 10:17:30.774057 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f3575076-9cb3-4c45-859f-87159b323401-utilities\") pod \"redhat-operators-n8nx2\" (UID: \"f3575076-9cb3-4c45-859f-87159b323401\") " pod="openshift-marketplace/redhat-operators-n8nx2" Sep 30 10:17:30 crc kubenswrapper[4730]: I0930 10:17:30.774092 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f3575076-9cb3-4c45-859f-87159b323401-catalog-content\") pod \"redhat-operators-n8nx2\" (UID: \"f3575076-9cb3-4c45-859f-87159b323401\") " pod="openshift-marketplace/redhat-operators-n8nx2" Sep 30 10:17:30 crc kubenswrapper[4730]: I0930 10:17:30.875794 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9jjmr\" (UniqueName: \"kubernetes.io/projected/f3575076-9cb3-4c45-859f-87159b323401-kube-api-access-9jjmr\") pod \"redhat-operators-n8nx2\" (UID: \"f3575076-9cb3-4c45-859f-87159b323401\") " pod="openshift-marketplace/redhat-operators-n8nx2" Sep 30 10:17:30 crc kubenswrapper[4730]: I0930 10:17:30.875892 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f3575076-9cb3-4c45-859f-87159b323401-utilities\") pod \"redhat-operators-n8nx2\" (UID: \"f3575076-9cb3-4c45-859f-87159b323401\") " pod="openshift-marketplace/redhat-operators-n8nx2" Sep 30 10:17:30 crc kubenswrapper[4730]: I0930 10:17:30.875917 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f3575076-9cb3-4c45-859f-87159b323401-catalog-content\") pod \"redhat-operators-n8nx2\" (UID: \"f3575076-9cb3-4c45-859f-87159b323401\") " pod="openshift-marketplace/redhat-operators-n8nx2" Sep 30 10:17:30 crc kubenswrapper[4730]: I0930 10:17:30.876486 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f3575076-9cb3-4c45-859f-87159b323401-catalog-content\") pod \"redhat-operators-n8nx2\" (UID: \"f3575076-9cb3-4c45-859f-87159b323401\") " pod="openshift-marketplace/redhat-operators-n8nx2" Sep 30 10:17:30 crc kubenswrapper[4730]: I0930 10:17:30.876536 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f3575076-9cb3-4c45-859f-87159b323401-utilities\") pod \"redhat-operators-n8nx2\" (UID: \"f3575076-9cb3-4c45-859f-87159b323401\") " pod="openshift-marketplace/redhat-operators-n8nx2" Sep 30 10:17:30 crc kubenswrapper[4730]: I0930 10:17:30.896329 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-9jjmr\" (UniqueName: \"kubernetes.io/projected/f3575076-9cb3-4c45-859f-87159b323401-kube-api-access-9jjmr\") pod \"redhat-operators-n8nx2\" (UID: \"f3575076-9cb3-4c45-859f-87159b323401\") " pod="openshift-marketplace/redhat-operators-n8nx2" Sep 30 10:17:30 crc kubenswrapper[4730]: I0930 10:17:30.928399 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-n8nx2" Sep 30 10:17:31 crc kubenswrapper[4730]: I0930 10:17:31.476947 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-n8nx2"] Sep 30 10:17:32 crc kubenswrapper[4730]: I0930 10:17:32.214941 4730 generic.go:334] "Generic (PLEG): container finished" podID="f3575076-9cb3-4c45-859f-87159b323401" containerID="34cb0bb6979391a25b22b4a0b449a19e54023e89e3ed34d3503e098c71d375ed" exitCode=0 Sep 30 10:17:32 crc kubenswrapper[4730]: I0930 10:17:32.215016 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-n8nx2" event={"ID":"f3575076-9cb3-4c45-859f-87159b323401","Type":"ContainerDied","Data":"34cb0bb6979391a25b22b4a0b449a19e54023e89e3ed34d3503e098c71d375ed"} Sep 30 10:17:32 crc kubenswrapper[4730]: I0930 10:17:32.215406 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-n8nx2" event={"ID":"f3575076-9cb3-4c45-859f-87159b323401","Type":"ContainerStarted","Data":"f6951a5ec309e0e61deb7bfcbb370ac2c4baeeda9eb996c6255a53f48e48a844"} Sep 30 10:17:33 crc kubenswrapper[4730]: I0930 10:17:33.041835 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-5qwrj"] Sep 30 10:17:33 crc kubenswrapper[4730]: I0930 10:17:33.055596 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-5qwrj"] Sep 30 10:17:34 crc kubenswrapper[4730]: I0930 10:17:34.236790 4730 generic.go:334] "Generic (PLEG): container finished" podID="f3575076-9cb3-4c45-859f-87159b323401" containerID="ae29e9e14c0364074b848fa87e3a659d9a30169b26b75c024f1e69ed6cfd1567" exitCode=0 Sep 30 10:17:34 crc kubenswrapper[4730]: I0930 10:17:34.236877 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-n8nx2" event={"ID":"f3575076-9cb3-4c45-859f-87159b323401","Type":"ContainerDied","Data":"ae29e9e14c0364074b848fa87e3a659d9a30169b26b75c024f1e69ed6cfd1567"} Sep 30 10:17:34 crc kubenswrapper[4730]: I0930 10:17:34.393530 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d1b317d0-1ec7-4bb5-9218-e853eaecbe43" path="/var/lib/kubelet/pods/d1b317d0-1ec7-4bb5-9218-e853eaecbe43/volumes" Sep 30 10:17:35 crc kubenswrapper[4730]: I0930 10:17:35.246641 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-n8nx2" event={"ID":"f3575076-9cb3-4c45-859f-87159b323401","Type":"ContainerStarted","Data":"af49319d79531da02686217a78261ce220ee1bcc3d86319d6c91a24383dfff82"} Sep 30 10:17:35 crc kubenswrapper[4730]: I0930 10:17:35.265498 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-n8nx2" podStartSLOduration=2.636425284 podStartE2EDuration="5.265479524s" podCreationTimestamp="2025-09-30 10:17:30 +0000 UTC" firstStartedPulling="2025-09-30 10:17:32.217399104 +0000 UTC m=+1696.550659097" lastFinishedPulling="2025-09-30 10:17:34.846453344 +0000 UTC m=+1699.179713337" observedRunningTime="2025-09-30 10:17:35.263863182 +0000 UTC m=+1699.597123195" 
watchObservedRunningTime="2025-09-30 10:17:35.265479524 +0000 UTC m=+1699.598739517" Sep 30 10:17:37 crc kubenswrapper[4730]: I0930 10:17:37.380986 4730 scope.go:117] "RemoveContainer" containerID="62dd4ca15c6a394c053301d28de02310227d72673a1c1548f40c218bf8c90c44" Sep 30 10:17:37 crc kubenswrapper[4730]: E0930 10:17:37.381494 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 10:17:40 crc kubenswrapper[4730]: I0930 10:17:40.928587 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-n8nx2" Sep 30 10:17:40 crc kubenswrapper[4730]: I0930 10:17:40.928915 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-n8nx2" Sep 30 10:17:40 crc kubenswrapper[4730]: I0930 10:17:40.979432 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-n8nx2" Sep 30 10:17:41 crc kubenswrapper[4730]: I0930 10:17:41.360502 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-n8nx2" Sep 30 10:17:41 crc kubenswrapper[4730]: I0930 10:17:41.413447 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-n8nx2"] Sep 30 10:17:42 crc kubenswrapper[4730]: I0930 10:17:42.042942 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-sync-fj5rw"] Sep 30 10:17:42 crc kubenswrapper[4730]: I0930 10:17:42.051628 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-sync-fj5rw"] Sep 30 10:17:42 crc kubenswrapper[4730]: I0930 10:17:42.447139 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7fc71401-9b3c-455c-9a56-28c6fcbde898" path="/var/lib/kubelet/pods/7fc71401-9b3c-455c-9a56-28c6fcbde898/volumes" Sep 30 10:17:43 crc kubenswrapper[4730]: I0930 10:17:43.316640 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-n8nx2" podUID="f3575076-9cb3-4c45-859f-87159b323401" containerName="registry-server" containerID="cri-o://af49319d79531da02686217a78261ce220ee1bcc3d86319d6c91a24383dfff82" gracePeriod=2 Sep 30 10:17:43 crc kubenswrapper[4730]: I0930 10:17:43.799387 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-n8nx2" Sep 30 10:17:43 crc kubenswrapper[4730]: I0930 10:17:43.935284 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f3575076-9cb3-4c45-859f-87159b323401-utilities\") pod \"f3575076-9cb3-4c45-859f-87159b323401\" (UID: \"f3575076-9cb3-4c45-859f-87159b323401\") " Sep 30 10:17:43 crc kubenswrapper[4730]: I0930 10:17:43.935443 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f3575076-9cb3-4c45-859f-87159b323401-catalog-content\") pod \"f3575076-9cb3-4c45-859f-87159b323401\" (UID: \"f3575076-9cb3-4c45-859f-87159b323401\") " Sep 30 10:17:43 crc kubenswrapper[4730]: I0930 10:17:43.935758 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9jjmr\" (UniqueName: \"kubernetes.io/projected/f3575076-9cb3-4c45-859f-87159b323401-kube-api-access-9jjmr\") pod \"f3575076-9cb3-4c45-859f-87159b323401\" (UID: \"f3575076-9cb3-4c45-859f-87159b323401\") " Sep 30 10:17:43 crc kubenswrapper[4730]: I0930 10:17:43.936693 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f3575076-9cb3-4c45-859f-87159b323401-utilities" (OuterVolumeSpecName: "utilities") pod "f3575076-9cb3-4c45-859f-87159b323401" (UID: "f3575076-9cb3-4c45-859f-87159b323401"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 10:17:43 crc kubenswrapper[4730]: I0930 10:17:43.943529 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f3575076-9cb3-4c45-859f-87159b323401-kube-api-access-9jjmr" (OuterVolumeSpecName: "kube-api-access-9jjmr") pod "f3575076-9cb3-4c45-859f-87159b323401" (UID: "f3575076-9cb3-4c45-859f-87159b323401"). InnerVolumeSpecName "kube-api-access-9jjmr". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:17:44 crc kubenswrapper[4730]: I0930 10:17:44.038235 4730 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f3575076-9cb3-4c45-859f-87159b323401-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 10:17:44 crc kubenswrapper[4730]: I0930 10:17:44.038269 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9jjmr\" (UniqueName: \"kubernetes.io/projected/f3575076-9cb3-4c45-859f-87159b323401-kube-api-access-9jjmr\") on node \"crc\" DevicePath \"\"" Sep 30 10:17:44 crc kubenswrapper[4730]: I0930 10:17:44.327222 4730 generic.go:334] "Generic (PLEG): container finished" podID="f3575076-9cb3-4c45-859f-87159b323401" containerID="af49319d79531da02686217a78261ce220ee1bcc3d86319d6c91a24383dfff82" exitCode=0 Sep 30 10:17:44 crc kubenswrapper[4730]: I0930 10:17:44.327266 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-n8nx2" event={"ID":"f3575076-9cb3-4c45-859f-87159b323401","Type":"ContainerDied","Data":"af49319d79531da02686217a78261ce220ee1bcc3d86319d6c91a24383dfff82"} Sep 30 10:17:44 crc kubenswrapper[4730]: I0930 10:17:44.327293 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-n8nx2" event={"ID":"f3575076-9cb3-4c45-859f-87159b323401","Type":"ContainerDied","Data":"f6951a5ec309e0e61deb7bfcbb370ac2c4baeeda9eb996c6255a53f48e48a844"} Sep 30 10:17:44 crc kubenswrapper[4730]: I0930 10:17:44.327312 4730 scope.go:117] "RemoveContainer" containerID="af49319d79531da02686217a78261ce220ee1bcc3d86319d6c91a24383dfff82" Sep 30 10:17:44 crc kubenswrapper[4730]: I0930 10:17:44.327267 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-n8nx2" Sep 30 10:17:44 crc kubenswrapper[4730]: I0930 10:17:44.352693 4730 scope.go:117] "RemoveContainer" containerID="ae29e9e14c0364074b848fa87e3a659d9a30169b26b75c024f1e69ed6cfd1567" Sep 30 10:17:44 crc kubenswrapper[4730]: I0930 10:17:44.381069 4730 scope.go:117] "RemoveContainer" containerID="34cb0bb6979391a25b22b4a0b449a19e54023e89e3ed34d3503e098c71d375ed" Sep 30 10:17:44 crc kubenswrapper[4730]: I0930 10:17:44.424720 4730 scope.go:117] "RemoveContainer" containerID="af49319d79531da02686217a78261ce220ee1bcc3d86319d6c91a24383dfff82" Sep 30 10:17:44 crc kubenswrapper[4730]: E0930 10:17:44.425041 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"af49319d79531da02686217a78261ce220ee1bcc3d86319d6c91a24383dfff82\": container with ID starting with af49319d79531da02686217a78261ce220ee1bcc3d86319d6c91a24383dfff82 not found: ID does not exist" containerID="af49319d79531da02686217a78261ce220ee1bcc3d86319d6c91a24383dfff82" Sep 30 10:17:44 crc kubenswrapper[4730]: I0930 10:17:44.425072 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"af49319d79531da02686217a78261ce220ee1bcc3d86319d6c91a24383dfff82"} err="failed to get container status \"af49319d79531da02686217a78261ce220ee1bcc3d86319d6c91a24383dfff82\": rpc error: code = NotFound desc = could not find container \"af49319d79531da02686217a78261ce220ee1bcc3d86319d6c91a24383dfff82\": container with ID starting with af49319d79531da02686217a78261ce220ee1bcc3d86319d6c91a24383dfff82 not found: ID does not exist" Sep 30 10:17:44 crc kubenswrapper[4730]: I0930 10:17:44.425093 4730 scope.go:117] "RemoveContainer" containerID="ae29e9e14c0364074b848fa87e3a659d9a30169b26b75c024f1e69ed6cfd1567" Sep 30 10:17:44 crc kubenswrapper[4730]: E0930 10:17:44.425326 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ae29e9e14c0364074b848fa87e3a659d9a30169b26b75c024f1e69ed6cfd1567\": container with ID starting with ae29e9e14c0364074b848fa87e3a659d9a30169b26b75c024f1e69ed6cfd1567 not found: ID does not exist" containerID="ae29e9e14c0364074b848fa87e3a659d9a30169b26b75c024f1e69ed6cfd1567" Sep 30 10:17:44 crc kubenswrapper[4730]: I0930 10:17:44.425361 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ae29e9e14c0364074b848fa87e3a659d9a30169b26b75c024f1e69ed6cfd1567"} err="failed to get container status \"ae29e9e14c0364074b848fa87e3a659d9a30169b26b75c024f1e69ed6cfd1567\": rpc error: code = NotFound desc = could not find container \"ae29e9e14c0364074b848fa87e3a659d9a30169b26b75c024f1e69ed6cfd1567\": container with ID starting with ae29e9e14c0364074b848fa87e3a659d9a30169b26b75c024f1e69ed6cfd1567 not found: ID does not exist" Sep 30 10:17:44 crc kubenswrapper[4730]: I0930 10:17:44.425387 4730 scope.go:117] "RemoveContainer" containerID="34cb0bb6979391a25b22b4a0b449a19e54023e89e3ed34d3503e098c71d375ed" Sep 30 10:17:44 crc kubenswrapper[4730]: E0930 10:17:44.425749 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"34cb0bb6979391a25b22b4a0b449a19e54023e89e3ed34d3503e098c71d375ed\": container with ID starting with 34cb0bb6979391a25b22b4a0b449a19e54023e89e3ed34d3503e098c71d375ed not found: ID does not exist" containerID="34cb0bb6979391a25b22b4a0b449a19e54023e89e3ed34d3503e098c71d375ed" 
Sep 30 10:17:44 crc kubenswrapper[4730]: I0930 10:17:44.425808 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"34cb0bb6979391a25b22b4a0b449a19e54023e89e3ed34d3503e098c71d375ed"} err="failed to get container status \"34cb0bb6979391a25b22b4a0b449a19e54023e89e3ed34d3503e098c71d375ed\": rpc error: code = NotFound desc = could not find container \"34cb0bb6979391a25b22b4a0b449a19e54023e89e3ed34d3503e098c71d375ed\": container with ID starting with 34cb0bb6979391a25b22b4a0b449a19e54023e89e3ed34d3503e098c71d375ed not found: ID does not exist"
Sep 30 10:17:44 crc kubenswrapper[4730]: I0930 10:17:44.818993 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f3575076-9cb3-4c45-859f-87159b323401-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f3575076-9cb3-4c45-859f-87159b323401" (UID: "f3575076-9cb3-4c45-859f-87159b323401"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 10:17:44 crc kubenswrapper[4730]: I0930 10:17:44.853262 4730 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f3575076-9cb3-4c45-859f-87159b323401-catalog-content\") on node \"crc\" DevicePath \"\""
Sep 30 10:17:44 crc kubenswrapper[4730]: I0930 10:17:44.960995 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-n8nx2"]
Sep 30 10:17:44 crc kubenswrapper[4730]: I0930 10:17:44.969954 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-n8nx2"]
Sep 30 10:17:46 crc kubenswrapper[4730]: I0930 10:17:46.393293 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f3575076-9cb3-4c45-859f-87159b323401" path="/var/lib/kubelet/pods/f3575076-9cb3-4c45-859f-87159b323401/volumes"
Sep 30 10:17:48 crc kubenswrapper[4730]: I0930 10:17:48.381775 4730 scope.go:117] "RemoveContainer" containerID="62dd4ca15c6a394c053301d28de02310227d72673a1c1548f40c218bf8c90c44"
Sep 30 10:17:48 crc kubenswrapper[4730]: E0930 10:17:48.382426 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327"
Sep 30 10:17:59 crc kubenswrapper[4730]: I0930 10:17:59.026293 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-sync-cmvzl"]
Sep 30 10:17:59 crc kubenswrapper[4730]: I0930 10:17:59.034308 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-sync-cmvzl"]
Sep 30 10:18:00 crc kubenswrapper[4730]: I0930 10:18:00.392960 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="577c636c-9e1c-4e65-b164-dcc8e200d7c1" path="/var/lib/kubelet/pods/577c636c-9e1c-4e65-b164-dcc8e200d7c1/volumes"
Sep 30 10:18:01 crc kubenswrapper[4730]: I0930 10:18:01.381556 4730 scope.go:117] "RemoveContainer" containerID="62dd4ca15c6a394c053301d28de02310227d72673a1c1548f40c218bf8c90c44"
Sep 30 10:18:01 crc kubenswrapper[4730]: E0930 10:18:01.381933 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327"
Sep 30 10:18:12 crc kubenswrapper[4730]: I0930 10:18:12.381557 4730 scope.go:117] "RemoveContainer" containerID="62dd4ca15c6a394c053301d28de02310227d72673a1c1548f40c218bf8c90c44"
Sep 30 10:18:12 crc kubenswrapper[4730]: E0930 10:18:12.382415 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327"
Sep 30 10:18:15 crc kubenswrapper[4730]: I0930 10:18:15.634272 4730 generic.go:334] "Generic (PLEG): container finished" podID="39a3cdfe-e568-41af-9a8b-88525dc448de" containerID="6d4a0ffdefdd28b27e93c3e105638672bb45fab240df9d1c04536e5d96da363b" exitCode=0
Sep 30 10:18:15 crc kubenswrapper[4730]: I0930 10:18:15.634351 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-p2hb9" event={"ID":"39a3cdfe-e568-41af-9a8b-88525dc448de","Type":"ContainerDied","Data":"6d4a0ffdefdd28b27e93c3e105638672bb45fab240df9d1c04536e5d96da363b"}
Sep 30 10:18:17 crc kubenswrapper[4730]: I0930 10:18:17.056760 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-p2hb9"
Sep 30 10:18:17 crc kubenswrapper[4730]: I0930 10:18:17.213731 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8w9jj\" (UniqueName: \"kubernetes.io/projected/39a3cdfe-e568-41af-9a8b-88525dc448de-kube-api-access-8w9jj\") pod \"39a3cdfe-e568-41af-9a8b-88525dc448de\" (UID: \"39a3cdfe-e568-41af-9a8b-88525dc448de\") "
Sep 30 10:18:17 crc kubenswrapper[4730]: I0930 10:18:17.213823 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/39a3cdfe-e568-41af-9a8b-88525dc448de-inventory\") pod \"39a3cdfe-e568-41af-9a8b-88525dc448de\" (UID: \"39a3cdfe-e568-41af-9a8b-88525dc448de\") "
Sep 30 10:18:17 crc kubenswrapper[4730]: I0930 10:18:17.214052 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/39a3cdfe-e568-41af-9a8b-88525dc448de-ssh-key\") pod \"39a3cdfe-e568-41af-9a8b-88525dc448de\" (UID: \"39a3cdfe-e568-41af-9a8b-88525dc448de\") "
Sep 30 10:18:17 crc kubenswrapper[4730]: I0930 10:18:17.219381 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/39a3cdfe-e568-41af-9a8b-88525dc448de-kube-api-access-8w9jj" (OuterVolumeSpecName: "kube-api-access-8w9jj") pod "39a3cdfe-e568-41af-9a8b-88525dc448de" (UID: "39a3cdfe-e568-41af-9a8b-88525dc448de"). InnerVolumeSpecName "kube-api-access-8w9jj". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 10:18:17 crc kubenswrapper[4730]: I0930 10:18:17.241793 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/39a3cdfe-e568-41af-9a8b-88525dc448de-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "39a3cdfe-e568-41af-9a8b-88525dc448de" (UID: "39a3cdfe-e568-41af-9a8b-88525dc448de"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 10:18:17 crc kubenswrapper[4730]: I0930 10:18:17.242438 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/39a3cdfe-e568-41af-9a8b-88525dc448de-inventory" (OuterVolumeSpecName: "inventory") pod "39a3cdfe-e568-41af-9a8b-88525dc448de" (UID: "39a3cdfe-e568-41af-9a8b-88525dc448de"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 10:18:17 crc kubenswrapper[4730]: I0930 10:18:17.317661 4730 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/39a3cdfe-e568-41af-9a8b-88525dc448de-ssh-key\") on node \"crc\" DevicePath \"\""
Sep 30 10:18:17 crc kubenswrapper[4730]: I0930 10:18:17.317849 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8w9jj\" (UniqueName: \"kubernetes.io/projected/39a3cdfe-e568-41af-9a8b-88525dc448de-kube-api-access-8w9jj\") on node \"crc\" DevicePath \"\""
Sep 30 10:18:17 crc kubenswrapper[4730]: I0930 10:18:17.317953 4730 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/39a3cdfe-e568-41af-9a8b-88525dc448de-inventory\") on node \"crc\" DevicePath \"\""
Sep 30 10:18:17 crc kubenswrapper[4730]: I0930 10:18:17.655722 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-p2hb9" event={"ID":"39a3cdfe-e568-41af-9a8b-88525dc448de","Type":"ContainerDied","Data":"652267776a9abd725e102f56275c7a51da6b7836c3cc596de007e8cb29d27fca"}
Sep 30 10:18:17 crc kubenswrapper[4730]: I0930 10:18:17.656328 4730 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="652267776a9abd725e102f56275c7a51da6b7836c3cc596de007e8cb29d27fca"
Sep 30 10:18:17 crc kubenswrapper[4730]: I0930 10:18:17.655784 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-p2hb9"
Sep 30 10:18:17 crc kubenswrapper[4730]: I0930 10:18:17.783765 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-8btdm"]
Sep 30 10:18:17 crc kubenswrapper[4730]: E0930 10:18:17.784262 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f3575076-9cb3-4c45-859f-87159b323401" containerName="extract-content"
Sep 30 10:18:17 crc kubenswrapper[4730]: I0930 10:18:17.784286 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="f3575076-9cb3-4c45-859f-87159b323401" containerName="extract-content"
Sep 30 10:18:17 crc kubenswrapper[4730]: E0930 10:18:17.784311 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="39a3cdfe-e568-41af-9a8b-88525dc448de" containerName="configure-os-edpm-deployment-openstack-edpm-ipam"
Sep 30 10:18:17 crc kubenswrapper[4730]: I0930 10:18:17.784321 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="39a3cdfe-e568-41af-9a8b-88525dc448de" containerName="configure-os-edpm-deployment-openstack-edpm-ipam"
Sep 30 10:18:17 crc kubenswrapper[4730]: E0930 10:18:17.784337 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f3575076-9cb3-4c45-859f-87159b323401" containerName="registry-server"
Sep 30 10:18:17 crc kubenswrapper[4730]: I0930 10:18:17.784346 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="f3575076-9cb3-4c45-859f-87159b323401" containerName="registry-server"
Sep 30 10:18:17 crc kubenswrapper[4730]: E0930 10:18:17.784375 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f3575076-9cb3-4c45-859f-87159b323401" containerName="extract-utilities"
Sep 30 10:18:17 crc kubenswrapper[4730]: I0930 10:18:17.784384 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="f3575076-9cb3-4c45-859f-87159b323401" containerName="extract-utilities"
Sep 30 10:18:17 crc kubenswrapper[4730]: I0930 10:18:17.784613 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="39a3cdfe-e568-41af-9a8b-88525dc448de" containerName="configure-os-edpm-deployment-openstack-edpm-ipam"
Sep 30 10:18:17 crc kubenswrapper[4730]: I0930 10:18:17.784659 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="f3575076-9cb3-4c45-859f-87159b323401" containerName="registry-server"
Sep 30 10:18:17 crc kubenswrapper[4730]: I0930 10:18:17.785427 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-8btdm"
Sep 30 10:18:17 crc kubenswrapper[4730]: I0930 10:18:17.787849 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Sep 30 10:18:17 crc kubenswrapper[4730]: I0930 10:18:17.788222 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Sep 30 10:18:17 crc kubenswrapper[4730]: I0930 10:18:17.788419 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-8vdlt"
Sep 30 10:18:17 crc kubenswrapper[4730]: I0930 10:18:17.788693 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Sep 30 10:18:17 crc kubenswrapper[4730]: I0930 10:18:17.800535 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-8btdm"]
Sep 30 10:18:17 crc kubenswrapper[4730]: I0930 10:18:17.980421 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/3e2a7720-3a68-4362-bf36-309b615a90d8-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-8btdm\" (UID: \"3e2a7720-3a68-4362-bf36-309b615a90d8\") " pod="openstack/ssh-known-hosts-edpm-deployment-8btdm"
Sep 30 10:18:17 crc kubenswrapper[4730]: I0930 10:18:17.981285 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/3e2a7720-3a68-4362-bf36-309b615a90d8-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-8btdm\" (UID: \"3e2a7720-3a68-4362-bf36-309b615a90d8\") " pod="openstack/ssh-known-hosts-edpm-deployment-8btdm"
Sep 30 10:18:17 crc kubenswrapper[4730]: I0930 10:18:17.981459 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xpb9r\" (UniqueName: \"kubernetes.io/projected/3e2a7720-3a68-4362-bf36-309b615a90d8-kube-api-access-xpb9r\") pod \"ssh-known-hosts-edpm-deployment-8btdm\" (UID: \"3e2a7720-3a68-4362-bf36-309b615a90d8\") " pod="openstack/ssh-known-hosts-edpm-deployment-8btdm"
Sep 30 10:18:18 crc kubenswrapper[4730]: I0930 10:18:18.083676 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/3e2a7720-3a68-4362-bf36-309b615a90d8-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-8btdm\" (UID: \"3e2a7720-3a68-4362-bf36-309b615a90d8\") " pod="openstack/ssh-known-hosts-edpm-deployment-8btdm"
Sep 30 10:18:18 crc kubenswrapper[4730]: I0930 10:18:18.084028 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/3e2a7720-3a68-4362-bf36-309b615a90d8-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-8btdm\" (UID: \"3e2a7720-3a68-4362-bf36-309b615a90d8\") " pod="openstack/ssh-known-hosts-edpm-deployment-8btdm"
Sep 30 10:18:18 crc kubenswrapper[4730]: I0930 10:18:18.084207 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xpb9r\" (UniqueName: \"kubernetes.io/projected/3e2a7720-3a68-4362-bf36-309b615a90d8-kube-api-access-xpb9r\") pod \"ssh-known-hosts-edpm-deployment-8btdm\" (UID: \"3e2a7720-3a68-4362-bf36-309b615a90d8\") " pod="openstack/ssh-known-hosts-edpm-deployment-8btdm"
Sep 30 10:18:18 crc kubenswrapper[4730]: I0930 10:18:18.087899 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/3e2a7720-3a68-4362-bf36-309b615a90d8-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-8btdm\" (UID: \"3e2a7720-3a68-4362-bf36-309b615a90d8\") " pod="openstack/ssh-known-hosts-edpm-deployment-8btdm"
Sep 30 10:18:18 crc kubenswrapper[4730]: I0930 10:18:18.095196 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/3e2a7720-3a68-4362-bf36-309b615a90d8-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-8btdm\" (UID: \"3e2a7720-3a68-4362-bf36-309b615a90d8\") " pod="openstack/ssh-known-hosts-edpm-deployment-8btdm"
Sep 30 10:18:18 crc kubenswrapper[4730]: I0930 10:18:18.102817 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xpb9r\" (UniqueName: \"kubernetes.io/projected/3e2a7720-3a68-4362-bf36-309b615a90d8-kube-api-access-xpb9r\") pod \"ssh-known-hosts-edpm-deployment-8btdm\" (UID: \"3e2a7720-3a68-4362-bf36-309b615a90d8\") " pod="openstack/ssh-known-hosts-edpm-deployment-8btdm"
Sep 30 10:18:18 crc kubenswrapper[4730]: I0930 10:18:18.103671 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-8btdm"
Sep 30 10:18:18 crc kubenswrapper[4730]: I0930 10:18:18.658511 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-8btdm"]
Sep 30 10:18:18 crc kubenswrapper[4730]: I0930 10:18:18.673129 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-8btdm" event={"ID":"3e2a7720-3a68-4362-bf36-309b615a90d8","Type":"ContainerStarted","Data":"4646bc8cd9617e2e5963d7203a4b42b21b40e287d268529d742d995cb7276dd9"}
Sep 30 10:18:19 crc kubenswrapper[4730]: I0930 10:18:19.683126 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-8btdm" event={"ID":"3e2a7720-3a68-4362-bf36-309b615a90d8","Type":"ContainerStarted","Data":"f6a311497fee41998076d4bcaae584e4852882de010269bf4f4e4322b0ab9cd7"}
Sep 30 10:18:19 crc kubenswrapper[4730]: I0930 10:18:19.703408 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ssh-known-hosts-edpm-deployment-8btdm" podStartSLOduration=2.098599845 podStartE2EDuration="2.703386514s" podCreationTimestamp="2025-09-30 10:18:17 +0000 UTC" firstStartedPulling="2025-09-30 10:18:18.658907127 +0000 UTC m=+1742.992167120" lastFinishedPulling="2025-09-30 10:18:19.263693776 +0000 UTC m=+1743.596953789" observedRunningTime="2025-09-30 10:18:19.695848681 +0000 UTC m=+1744.029108674" watchObservedRunningTime="2025-09-30 10:18:19.703386514 +0000 UTC m=+1744.036646507"
Sep 30 10:18:21 crc kubenswrapper[4730]: I0930 10:18:21.045210 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-sync-wrvww"]
Sep 30 10:18:21 crc kubenswrapper[4730]: I0930 10:18:21.054153 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-sync-wrvww"]
Sep 30 10:18:22 crc kubenswrapper[4730]: I0930 10:18:22.227578 4730 scope.go:117] "RemoveContainer" containerID="98307790e39834692f9e054993bc684b2f8357a2a577bbc1719af4a10fca0927"
Sep 30 10:18:22 crc kubenswrapper[4730]: I0930 10:18:22.282354 4730 scope.go:117] "RemoveContainer" containerID="a689eda78939a9e26e83604ee763aa90f44f1f184284552b53c614c665ea54b1"
Sep 30 10:18:22 crc kubenswrapper[4730]: I0930 10:18:22.302324 4730 scope.go:117] "RemoveContainer" containerID="94569ff68ed078a7a7205f05e47b9f78a7ae562146af2648048462abbdf3cb11"
Sep 30 10:18:22 crc kubenswrapper[4730]: I0930 10:18:22.355362 4730 scope.go:117] "RemoveContainer" containerID="db9ce9066063546a453449bd45807592f03643733df7fdeb5f1f204ff80fd0fc"
Sep 30 10:18:22 crc kubenswrapper[4730]: I0930 10:18:22.398244 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="488816e9-d4e6-4956-9671-c9de4118821c" path="/var/lib/kubelet/pods/488816e9-d4e6-4956-9671-c9de4118821c/volumes"
Sep 30 10:18:22 crc kubenswrapper[4730]: I0930 10:18:22.414351 4730 scope.go:117] "RemoveContainer" containerID="d2f35c02d2be018649cdd9cfc8229e1ca2014710bd17f7dda84aa85f84ed5ac9"
Sep 30 10:18:22 crc kubenswrapper[4730]: I0930 10:18:22.438823 4730 scope.go:117] "RemoveContainer" containerID="0b14d0d5ef1f55874424dcf50a7700149498d19a754323e68e72ddac52d24ebe"
Sep 30 10:18:22 crc kubenswrapper[4730]: I0930 10:18:22.506411 4730 scope.go:117] "RemoveContainer" containerID="bb516522601ec0b341f35b611fbd192370924e388303d36a652911ab98f7ee13"
Sep 30 10:18:22 crc kubenswrapper[4730]: I0930 10:18:22.536723 4730 scope.go:117] "RemoveContainer" containerID="2e5a066c48939e2a63fbcdf187ddc22bea6805126dde61c5600111bf897ca943"
Sep 30 10:18:22 crc kubenswrapper[4730]: I0930 10:18:22.554695 4730 scope.go:117] "RemoveContainer" containerID="9c9993ef51641126f6c41d89ea704dee0c15e26bcde6e19a8725ff9299e24232"
Sep 30 10:18:24 crc kubenswrapper[4730]: I0930 10:18:24.030916 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-db-create-8vbfh"]
Sep 30 10:18:24 crc kubenswrapper[4730]: I0930 10:18:24.045930 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-db-create-svtfd"]
Sep 30 10:18:24 crc kubenswrapper[4730]: I0930 10:18:24.057630 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-db-create-v6ms2"]
Sep 30 10:18:24 crc kubenswrapper[4730]: I0930 10:18:24.067217 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-db-create-8vbfh"]
Sep 30 10:18:24 crc kubenswrapper[4730]: I0930 10:18:24.074344 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-db-create-svtfd"]
Sep 30 10:18:24 crc kubenswrapper[4730]: I0930 10:18:24.081085 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-db-create-v6ms2"]
Sep 30 10:18:24 crc kubenswrapper[4730]: I0930 10:18:24.381704 4730 scope.go:117] "RemoveContainer" containerID="62dd4ca15c6a394c053301d28de02310227d72673a1c1548f40c218bf8c90c44"
Sep 30 10:18:24 crc kubenswrapper[4730]: E0930 10:18:24.382024 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327"
Sep 30 10:18:24 crc kubenswrapper[4730]: I0930 10:18:24.396082 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6750d3c8-c106-4c5a-a591-a243a9135831" path="/var/lib/kubelet/pods/6750d3c8-c106-4c5a-a591-a243a9135831/volumes"
Sep 30 10:18:24 crc kubenswrapper[4730]: I0930 10:18:24.396579 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a23804b2-1fe7-4b61-8b65-3d4be3166140" path="/var/lib/kubelet/pods/a23804b2-1fe7-4b61-8b65-3d4be3166140/volumes"
Sep 30 10:18:24 crc kubenswrapper[4730]: I0930 10:18:24.397184 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fee047bc-cc3d-4044-9045-955a116357df" path="/var/lib/kubelet/pods/fee047bc-cc3d-4044-9045-955a116357df/volumes"
Sep 30 10:18:26 crc kubenswrapper[4730]: I0930 10:18:26.764186 4730 generic.go:334] "Generic (PLEG): container finished" podID="3e2a7720-3a68-4362-bf36-309b615a90d8" containerID="f6a311497fee41998076d4bcaae584e4852882de010269bf4f4e4322b0ab9cd7" exitCode=0
Sep 30 10:18:26 crc kubenswrapper[4730]: I0930 10:18:26.764272 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-8btdm" event={"ID":"3e2a7720-3a68-4362-bf36-309b615a90d8","Type":"ContainerDied","Data":"f6a311497fee41998076d4bcaae584e4852882de010269bf4f4e4322b0ab9cd7"}
Sep 30 10:18:28 crc kubenswrapper[4730]: I0930 10:18:28.164826 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-8btdm"
Sep 30 10:18:28 crc kubenswrapper[4730]: I0930 10:18:28.284223 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/3e2a7720-3a68-4362-bf36-309b615a90d8-ssh-key-openstack-edpm-ipam\") pod \"3e2a7720-3a68-4362-bf36-309b615a90d8\" (UID: \"3e2a7720-3a68-4362-bf36-309b615a90d8\") "
Sep 30 10:18:28 crc kubenswrapper[4730]: I0930 10:18:28.284354 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/3e2a7720-3a68-4362-bf36-309b615a90d8-inventory-0\") pod \"3e2a7720-3a68-4362-bf36-309b615a90d8\" (UID: \"3e2a7720-3a68-4362-bf36-309b615a90d8\") "
Sep 30 10:18:28 crc kubenswrapper[4730]: I0930 10:18:28.284442 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xpb9r\" (UniqueName: \"kubernetes.io/projected/3e2a7720-3a68-4362-bf36-309b615a90d8-kube-api-access-xpb9r\") pod \"3e2a7720-3a68-4362-bf36-309b615a90d8\" (UID: \"3e2a7720-3a68-4362-bf36-309b615a90d8\") "
Sep 30 10:18:28 crc kubenswrapper[4730]: I0930 10:18:28.290101 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3e2a7720-3a68-4362-bf36-309b615a90d8-kube-api-access-xpb9r" (OuterVolumeSpecName: "kube-api-access-xpb9r") pod "3e2a7720-3a68-4362-bf36-309b615a90d8" (UID: "3e2a7720-3a68-4362-bf36-309b615a90d8"). InnerVolumeSpecName "kube-api-access-xpb9r". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 10:18:28 crc kubenswrapper[4730]: I0930 10:18:28.312742 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3e2a7720-3a68-4362-bf36-309b615a90d8-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "3e2a7720-3a68-4362-bf36-309b615a90d8" (UID: "3e2a7720-3a68-4362-bf36-309b615a90d8"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 10:18:28 crc kubenswrapper[4730]: I0930 10:18:28.316203 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3e2a7720-3a68-4362-bf36-309b615a90d8-inventory-0" (OuterVolumeSpecName: "inventory-0") pod "3e2a7720-3a68-4362-bf36-309b615a90d8" (UID: "3e2a7720-3a68-4362-bf36-309b615a90d8"). InnerVolumeSpecName "inventory-0". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 10:18:28 crc kubenswrapper[4730]: I0930 10:18:28.386837 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xpb9r\" (UniqueName: \"kubernetes.io/projected/3e2a7720-3a68-4362-bf36-309b615a90d8-kube-api-access-xpb9r\") on node \"crc\" DevicePath \"\""
Sep 30 10:18:28 crc kubenswrapper[4730]: I0930 10:18:28.386993 4730 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/3e2a7720-3a68-4362-bf36-309b615a90d8-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\""
Sep 30 10:18:28 crc kubenswrapper[4730]: I0930 10:18:28.387012 4730 reconciler_common.go:293] "Volume detached for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/3e2a7720-3a68-4362-bf36-309b615a90d8-inventory-0\") on node \"crc\" DevicePath \"\""
Sep 30 10:18:28 crc kubenswrapper[4730]: I0930 10:18:28.780233 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-8btdm" event={"ID":"3e2a7720-3a68-4362-bf36-309b615a90d8","Type":"ContainerDied","Data":"4646bc8cd9617e2e5963d7203a4b42b21b40e287d268529d742d995cb7276dd9"}
Sep 30 10:18:28 crc kubenswrapper[4730]: I0930 10:18:28.780274 4730 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4646bc8cd9617e2e5963d7203a4b42b21b40e287d268529d742d995cb7276dd9"
Sep 30 10:18:28 crc kubenswrapper[4730]: I0930 10:18:28.780282 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-8btdm"
Sep 30 10:18:28 crc kubenswrapper[4730]: I0930 10:18:28.861824 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-cr67n"]
Sep 30 10:18:28 crc kubenswrapper[4730]: E0930 10:18:28.862241 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e2a7720-3a68-4362-bf36-309b615a90d8" containerName="ssh-known-hosts-edpm-deployment"
Sep 30 10:18:28 crc kubenswrapper[4730]: I0930 10:18:28.862262 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e2a7720-3a68-4362-bf36-309b615a90d8" containerName="ssh-known-hosts-edpm-deployment"
Sep 30 10:18:28 crc kubenswrapper[4730]: I0930 10:18:28.862503 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="3e2a7720-3a68-4362-bf36-309b615a90d8" containerName="ssh-known-hosts-edpm-deployment"
Sep 30 10:18:28 crc kubenswrapper[4730]: I0930 10:18:28.863211 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-cr67n"
Sep 30 10:18:28 crc kubenswrapper[4730]: I0930 10:18:28.872489 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Sep 30 10:18:28 crc kubenswrapper[4730]: I0930 10:18:28.872741 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Sep 30 10:18:28 crc kubenswrapper[4730]: I0930 10:18:28.872962 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-cr67n"]
Sep 30 10:18:28 crc kubenswrapper[4730]: I0930 10:18:28.873026 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-8vdlt"
Sep 30 10:18:28 crc kubenswrapper[4730]: I0930 10:18:28.874183 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Sep 30 10:18:28 crc kubenswrapper[4730]: I0930 10:18:28.998862 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-btdpj\" (UniqueName: \"kubernetes.io/projected/937c0dd4-da7e-4fb3-9588-a13534fc6821-kube-api-access-btdpj\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-cr67n\" (UID: \"937c0dd4-da7e-4fb3-9588-a13534fc6821\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-cr67n"
Sep 30 10:18:28 crc kubenswrapper[4730]: I0930 10:18:28.998910 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/937c0dd4-da7e-4fb3-9588-a13534fc6821-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-cr67n\" (UID: \"937c0dd4-da7e-4fb3-9588-a13534fc6821\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-cr67n"
Sep 30 10:18:28 crc kubenswrapper[4730]: I0930 10:18:28.998976 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/937c0dd4-da7e-4fb3-9588-a13534fc6821-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-cr67n\" (UID: \"937c0dd4-da7e-4fb3-9588-a13534fc6821\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-cr67n"
Sep 30 10:18:29 crc kubenswrapper[4730]: I0930 10:18:29.100834 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-btdpj\" (UniqueName: \"kubernetes.io/projected/937c0dd4-da7e-4fb3-9588-a13534fc6821-kube-api-access-btdpj\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-cr67n\" (UID: \"937c0dd4-da7e-4fb3-9588-a13534fc6821\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-cr67n"
Sep 30 10:18:29 crc kubenswrapper[4730]: I0930 10:18:29.100893 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/937c0dd4-da7e-4fb3-9588-a13534fc6821-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-cr67n\" (UID: \"937c0dd4-da7e-4fb3-9588-a13534fc6821\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-cr67n"
Sep 30 10:18:29 crc kubenswrapper[4730]: I0930 10:18:29.100950 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/937c0dd4-da7e-4fb3-9588-a13534fc6821-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-cr67n\" (UID: \"937c0dd4-da7e-4fb3-9588-a13534fc6821\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-cr67n"
Sep 30 10:18:29 crc kubenswrapper[4730]: I0930 10:18:29.107163 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/937c0dd4-da7e-4fb3-9588-a13534fc6821-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-cr67n\" (UID: \"937c0dd4-da7e-4fb3-9588-a13534fc6821\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-cr67n"
Sep 30 10:18:29 crc kubenswrapper[4730]: I0930 10:18:29.108699 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/937c0dd4-da7e-4fb3-9588-a13534fc6821-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-cr67n\" (UID: \"937c0dd4-da7e-4fb3-9588-a13534fc6821\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-cr67n"
Sep 30 10:18:29 crc kubenswrapper[4730]: I0930 10:18:29.118871 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-btdpj\" (UniqueName: \"kubernetes.io/projected/937c0dd4-da7e-4fb3-9588-a13534fc6821-kube-api-access-btdpj\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-cr67n\" (UID: \"937c0dd4-da7e-4fb3-9588-a13534fc6821\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-cr67n"
Sep 30 10:18:29 crc kubenswrapper[4730]: I0930 10:18:29.179526 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-cr67n"
Sep 30 10:18:29 crc kubenswrapper[4730]: I0930 10:18:29.673149 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-cr67n"]
Sep 30 10:18:29 crc kubenswrapper[4730]: I0930 10:18:29.791490 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-cr67n" event={"ID":"937c0dd4-da7e-4fb3-9588-a13534fc6821","Type":"ContainerStarted","Data":"8cf3d0349785e7d9fd57d29e201995c30c8d1d6b201a3b99c030970238b4a0a3"}
Sep 30 10:18:30 crc kubenswrapper[4730]: I0930 10:18:30.801712 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-cr67n" event={"ID":"937c0dd4-da7e-4fb3-9588-a13534fc6821","Type":"ContainerStarted","Data":"c4cb6a2ecd20b17c3d3be299b076b15ae067be9a54441b204d478c285ee5e6c8"}
Sep 30 10:18:30 crc kubenswrapper[4730]: I0930 10:18:30.821437 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-cr67n" podStartSLOduration=2.33837392 podStartE2EDuration="2.821420507s" podCreationTimestamp="2025-09-30 10:18:28 +0000 UTC" firstStartedPulling="2025-09-30 10:18:29.675028546 +0000 UTC m=+1754.008288539" lastFinishedPulling="2025-09-30 10:18:30.158075143 +0000 UTC m=+1754.491335126" observedRunningTime="2025-09-30 10:18:30.817990298 +0000 UTC m=+1755.151250291" watchObservedRunningTime="2025-09-30 10:18:30.821420507 +0000 UTC m=+1755.154680500"
Sep 30 10:18:37 crc kubenswrapper[4730]: I0930 10:18:37.052121 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-6770-account-create-7j85m"]
Sep 30 10:18:37 crc kubenswrapper[4730]: I0930 10:18:37.062313 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-6770-account-create-7j85m"]
Sep 30 10:18:38 crc kubenswrapper[4730]: I0930 10:18:38.380986 4730 scope.go:117] "RemoveContainer" containerID="62dd4ca15c6a394c053301d28de02310227d72673a1c1548f40c218bf8c90c44"
Sep 30 10:18:38 crc kubenswrapper[4730]: E0930 10:18:38.382678 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327"
Sep 30 10:18:38 crc kubenswrapper[4730]: I0930 10:18:38.393250 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="51b81319-5b51-4d86-8e0f-d5c955ea145b" path="/var/lib/kubelet/pods/51b81319-5b51-4d86-8e0f-d5c955ea145b/volumes"
Sep 30 10:18:38 crc kubenswrapper[4730]: I0930 10:18:38.890329 4730 generic.go:334] "Generic (PLEG): container finished" podID="937c0dd4-da7e-4fb3-9588-a13534fc6821" containerID="c4cb6a2ecd20b17c3d3be299b076b15ae067be9a54441b204d478c285ee5e6c8" exitCode=0
Sep 30 10:18:38 crc kubenswrapper[4730]: I0930 10:18:38.890381 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-cr67n" event={"ID":"937c0dd4-da7e-4fb3-9588-a13534fc6821","Type":"ContainerDied","Data":"c4cb6a2ecd20b17c3d3be299b076b15ae067be9a54441b204d478c285ee5e6c8"}
Sep 30 10:18:39 crc kubenswrapper[4730]: I0930 10:18:39.030465 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-5004-account-create-kzhh4"]
Sep 30 10:18:39 crc kubenswrapper[4730]: I0930 10:18:39.039150 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-beff-account-create-w69vt"]
Sep 30 10:18:39 crc kubenswrapper[4730]: I0930 10:18:39.045924 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-5004-account-create-kzhh4"]
Sep 30 10:18:39 crc kubenswrapper[4730]: I0930 10:18:39.052930 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-beff-account-create-w69vt"]
Sep 30 10:18:40 crc kubenswrapper[4730]: I0930 10:18:40.287887 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-cr67n"
Sep 30 10:18:40 crc kubenswrapper[4730]: I0930 10:18:40.391253 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3a4ff200-9ad7-4a4b-afe8-4170071efc67" path="/var/lib/kubelet/pods/3a4ff200-9ad7-4a4b-afe8-4170071efc67/volumes"
Sep 30 10:18:40 crc kubenswrapper[4730]: I0930 10:18:40.391944 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7070bd97-620f-4ad8-b7c1-044902ae6857" path="/var/lib/kubelet/pods/7070bd97-620f-4ad8-b7c1-044902ae6857/volumes"
Sep 30 10:18:40 crc kubenswrapper[4730]: I0930 10:18:40.437697 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/937c0dd4-da7e-4fb3-9588-a13534fc6821-ssh-key\") pod \"937c0dd4-da7e-4fb3-9588-a13534fc6821\" (UID: \"937c0dd4-da7e-4fb3-9588-a13534fc6821\") "
Sep 30 10:18:40 crc kubenswrapper[4730]: I0930 10:18:40.437948 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-btdpj\" (UniqueName: \"kubernetes.io/projected/937c0dd4-da7e-4fb3-9588-a13534fc6821-kube-api-access-btdpj\") pod \"937c0dd4-da7e-4fb3-9588-a13534fc6821\" (UID: \"937c0dd4-da7e-4fb3-9588-a13534fc6821\") "
Sep 30 10:18:40 crc kubenswrapper[4730]: I0930 10:18:40.437983 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/937c0dd4-da7e-4fb3-9588-a13534fc6821-inventory\") pod \"937c0dd4-da7e-4fb3-9588-a13534fc6821\" (UID: \"937c0dd4-da7e-4fb3-9588-a13534fc6821\") "
Sep 30 10:18:40 crc kubenswrapper[4730]: I0930 10:18:40.442805 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/937c0dd4-da7e-4fb3-9588-a13534fc6821-kube-api-access-btdpj" (OuterVolumeSpecName: "kube-api-access-btdpj") pod "937c0dd4-da7e-4fb3-9588-a13534fc6821" (UID: "937c0dd4-da7e-4fb3-9588-a13534fc6821"). InnerVolumeSpecName "kube-api-access-btdpj". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 10:18:40 crc kubenswrapper[4730]: I0930 10:18:40.463943 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/937c0dd4-da7e-4fb3-9588-a13534fc6821-inventory" (OuterVolumeSpecName: "inventory") pod "937c0dd4-da7e-4fb3-9588-a13534fc6821" (UID: "937c0dd4-da7e-4fb3-9588-a13534fc6821"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 10:18:40 crc kubenswrapper[4730]: I0930 10:18:40.464336 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/937c0dd4-da7e-4fb3-9588-a13534fc6821-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "937c0dd4-da7e-4fb3-9588-a13534fc6821" (UID: "937c0dd4-da7e-4fb3-9588-a13534fc6821"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 10:18:40 crc kubenswrapper[4730]: I0930 10:18:40.540114 4730 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/937c0dd4-da7e-4fb3-9588-a13534fc6821-ssh-key\") on node \"crc\" DevicePath \"\""
Sep 30 10:18:40 crc kubenswrapper[4730]: I0930 10:18:40.540383 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-btdpj\" (UniqueName: \"kubernetes.io/projected/937c0dd4-da7e-4fb3-9588-a13534fc6821-kube-api-access-btdpj\") on node \"crc\" DevicePath \"\""
Sep 30 10:18:40 crc kubenswrapper[4730]: I0930 10:18:40.540394 4730 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/937c0dd4-da7e-4fb3-9588-a13534fc6821-inventory\") on node \"crc\" DevicePath \"\""
Sep 30 10:18:40 crc kubenswrapper[4730]: I0930 10:18:40.907344 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-cr67n" event={"ID":"937c0dd4-da7e-4fb3-9588-a13534fc6821","Type":"ContainerDied","Data":"8cf3d0349785e7d9fd57d29e201995c30c8d1d6b201a3b99c030970238b4a0a3"}
Sep 30 10:18:40 crc kubenswrapper[4730]: I0930 10:18:40.907384 4730 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8cf3d0349785e7d9fd57d29e201995c30c8d1d6b201a3b99c030970238b4a0a3"
Sep 30 10:18:40 crc kubenswrapper[4730]: I0930 10:18:40.907400 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-cr67n"
Sep 30 10:18:40 crc kubenswrapper[4730]: I0930 10:18:40.979970 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-n58s8"]
Sep 30 10:18:40 crc kubenswrapper[4730]: E0930 10:18:40.980439 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="937c0dd4-da7e-4fb3-9588-a13534fc6821" containerName="run-os-edpm-deployment-openstack-edpm-ipam"
Sep 30 10:18:40 crc kubenswrapper[4730]: I0930 10:18:40.980462 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="937c0dd4-da7e-4fb3-9588-a13534fc6821" containerName="run-os-edpm-deployment-openstack-edpm-ipam"
Sep 30 10:18:40 crc kubenswrapper[4730]: I0930 10:18:40.980747 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="937c0dd4-da7e-4fb3-9588-a13534fc6821" containerName="run-os-edpm-deployment-openstack-edpm-ipam"
Sep 30 10:18:40 crc kubenswrapper[4730]: I0930 10:18:40.981428 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-n58s8"
Sep 30 10:18:40 crc kubenswrapper[4730]: I0930 10:18:40.984290 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Sep 30 10:18:40 crc kubenswrapper[4730]: I0930 10:18:40.984575 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Sep 30 10:18:40 crc kubenswrapper[4730]: I0930 10:18:40.984644 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-8vdlt"
Sep 30 10:18:40 crc kubenswrapper[4730]: I0930 10:18:40.984933 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Sep 30 10:18:41 crc kubenswrapper[4730]: I0930 10:18:41.002674 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-n58s8"]
Sep 30 10:18:41 crc kubenswrapper[4730]: I0930 10:18:41.151228 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f222edb5-8cc1-4686-aed3-c4deba66caee-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-n58s8\" (UID: \"f222edb5-8cc1-4686-aed3-c4deba66caee\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-n58s8"
Sep 30 10:18:41 crc kubenswrapper[4730]: I0930 10:18:41.151307 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r26pn\" (UniqueName: \"kubernetes.io/projected/f222edb5-8cc1-4686-aed3-c4deba66caee-kube-api-access-r26pn\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-n58s8\" (UID: \"f222edb5-8cc1-4686-aed3-c4deba66caee\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-n58s8"
Sep 30 10:18:41 crc kubenswrapper[4730]: I0930 10:18:41.151404 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f222edb5-8cc1-4686-aed3-c4deba66caee-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-n58s8\" (UID: \"f222edb5-8cc1-4686-aed3-c4deba66caee\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-n58s8"
Sep 30 10:18:41 crc kubenswrapper[4730]: I0930 10:18:41.253495 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f222edb5-8cc1-4686-aed3-c4deba66caee-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-n58s8\" (UID: \"f222edb5-8cc1-4686-aed3-c4deba66caee\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-n58s8"
Sep 30 10:18:41 crc kubenswrapper[4730]: I0930 10:18:41.253588 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r26pn\" (UniqueName: \"kubernetes.io/projected/f222edb5-8cc1-4686-aed3-c4deba66caee-kube-api-access-r26pn\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-n58s8\" (UID: \"f222edb5-8cc1-4686-aed3-c4deba66caee\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-n58s8"
Sep 30 10:18:41 crc kubenswrapper[4730]: I0930 10:18:41.253672 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f222edb5-8cc1-4686-aed3-c4deba66caee-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-n58s8\" (UID:
\"f222edb5-8cc1-4686-aed3-c4deba66caee\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-n58s8" Sep 30 10:18:41 crc kubenswrapper[4730]: I0930 10:18:41.259521 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f222edb5-8cc1-4686-aed3-c4deba66caee-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-n58s8\" (UID: \"f222edb5-8cc1-4686-aed3-c4deba66caee\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-n58s8" Sep 30 10:18:41 crc kubenswrapper[4730]: I0930 10:18:41.259536 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f222edb5-8cc1-4686-aed3-c4deba66caee-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-n58s8\" (UID: \"f222edb5-8cc1-4686-aed3-c4deba66caee\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-n58s8" Sep 30 10:18:41 crc kubenswrapper[4730]: I0930 10:18:41.269036 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r26pn\" (UniqueName: \"kubernetes.io/projected/f222edb5-8cc1-4686-aed3-c4deba66caee-kube-api-access-r26pn\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-n58s8\" (UID: \"f222edb5-8cc1-4686-aed3-c4deba66caee\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-n58s8" Sep 30 10:18:41 crc kubenswrapper[4730]: I0930 10:18:41.298870 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-n58s8" Sep 30 10:18:41 crc kubenswrapper[4730]: I0930 10:18:41.840063 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-n58s8"] Sep 30 10:18:41 crc kubenswrapper[4730]: I0930 10:18:41.916175 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-n58s8" event={"ID":"f222edb5-8cc1-4686-aed3-c4deba66caee","Type":"ContainerStarted","Data":"3448089402dc7c106158ee94d1447ac4fc4afd95ec8db7f072c36936ea3f6d6b"} Sep 30 10:18:42 crc kubenswrapper[4730]: I0930 10:18:42.929201 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-n58s8" event={"ID":"f222edb5-8cc1-4686-aed3-c4deba66caee","Type":"ContainerStarted","Data":"4d9d26c90b0435e3b1eb2acbf25b22f2970a3c36aa3001181751f254715e90c7"} Sep 30 10:18:52 crc kubenswrapper[4730]: I0930 10:18:52.011200 4730 generic.go:334] "Generic (PLEG): container finished" podID="f222edb5-8cc1-4686-aed3-c4deba66caee" containerID="4d9d26c90b0435e3b1eb2acbf25b22f2970a3c36aa3001181751f254715e90c7" exitCode=0 Sep 30 10:18:52 crc kubenswrapper[4730]: I0930 10:18:52.011294 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-n58s8" event={"ID":"f222edb5-8cc1-4686-aed3-c4deba66caee","Type":"ContainerDied","Data":"4d9d26c90b0435e3b1eb2acbf25b22f2970a3c36aa3001181751f254715e90c7"} Sep 30 10:18:52 crc kubenswrapper[4730]: I0930 10:18:52.380665 4730 scope.go:117] "RemoveContainer" containerID="62dd4ca15c6a394c053301d28de02310227d72673a1c1548f40c218bf8c90c44" Sep 30 10:18:52 crc kubenswrapper[4730]: E0930 10:18:52.381090 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 10:18:53 crc kubenswrapper[4730]: I0930 10:18:53.408679 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-n58s8" Sep 30 10:18:53 crc kubenswrapper[4730]: I0930 10:18:53.599297 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r26pn\" (UniqueName: \"kubernetes.io/projected/f222edb5-8cc1-4686-aed3-c4deba66caee-kube-api-access-r26pn\") pod \"f222edb5-8cc1-4686-aed3-c4deba66caee\" (UID: \"f222edb5-8cc1-4686-aed3-c4deba66caee\") " Sep 30 10:18:53 crc kubenswrapper[4730]: I0930 10:18:53.599449 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f222edb5-8cc1-4686-aed3-c4deba66caee-ssh-key\") pod \"f222edb5-8cc1-4686-aed3-c4deba66caee\" (UID: \"f222edb5-8cc1-4686-aed3-c4deba66caee\") " Sep 30 10:18:53 crc kubenswrapper[4730]: I0930 10:18:53.599540 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f222edb5-8cc1-4686-aed3-c4deba66caee-inventory\") pod \"f222edb5-8cc1-4686-aed3-c4deba66caee\" (UID: \"f222edb5-8cc1-4686-aed3-c4deba66caee\") " Sep 30 10:18:53 crc kubenswrapper[4730]: I0930 10:18:53.604437 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f222edb5-8cc1-4686-aed3-c4deba66caee-kube-api-access-r26pn" (OuterVolumeSpecName: "kube-api-access-r26pn") pod "f222edb5-8cc1-4686-aed3-c4deba66caee" (UID: "f222edb5-8cc1-4686-aed3-c4deba66caee"). InnerVolumeSpecName "kube-api-access-r26pn". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:18:53 crc kubenswrapper[4730]: I0930 10:18:53.627386 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f222edb5-8cc1-4686-aed3-c4deba66caee-inventory" (OuterVolumeSpecName: "inventory") pod "f222edb5-8cc1-4686-aed3-c4deba66caee" (UID: "f222edb5-8cc1-4686-aed3-c4deba66caee"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:18:53 crc kubenswrapper[4730]: I0930 10:18:53.629169 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f222edb5-8cc1-4686-aed3-c4deba66caee-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "f222edb5-8cc1-4686-aed3-c4deba66caee" (UID: "f222edb5-8cc1-4686-aed3-c4deba66caee"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:18:53 crc kubenswrapper[4730]: I0930 10:18:53.701815 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r26pn\" (UniqueName: \"kubernetes.io/projected/f222edb5-8cc1-4686-aed3-c4deba66caee-kube-api-access-r26pn\") on node \"crc\" DevicePath \"\"" Sep 30 10:18:53 crc kubenswrapper[4730]: I0930 10:18:53.701884 4730 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f222edb5-8cc1-4686-aed3-c4deba66caee-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 10:18:53 crc kubenswrapper[4730]: I0930 10:18:53.701895 4730 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f222edb5-8cc1-4686-aed3-c4deba66caee-inventory\") on node \"crc\" DevicePath \"\"" Sep 30 10:18:54 crc kubenswrapper[4730]: I0930 10:18:54.030921 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-n58s8" event={"ID":"f222edb5-8cc1-4686-aed3-c4deba66caee","Type":"ContainerDied","Data":"3448089402dc7c106158ee94d1447ac4fc4afd95ec8db7f072c36936ea3f6d6b"} Sep 30 10:18:54 crc kubenswrapper[4730]: I0930 10:18:54.031029 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-n58s8" Sep 30 10:18:54 crc kubenswrapper[4730]: I0930 10:18:54.030971 4730 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3448089402dc7c106158ee94d1447ac4fc4afd95ec8db7f072c36936ea3f6d6b" Sep 30 10:19:07 crc kubenswrapper[4730]: I0930 10:19:07.381541 4730 scope.go:117] "RemoveContainer" containerID="62dd4ca15c6a394c053301d28de02310227d72673a1c1548f40c218bf8c90c44" Sep 30 10:19:07 crc kubenswrapper[4730]: E0930 10:19:07.382313 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 10:19:20 crc kubenswrapper[4730]: I0930 10:19:20.382210 4730 scope.go:117] "RemoveContainer" containerID="62dd4ca15c6a394c053301d28de02310227d72673a1c1548f40c218bf8c90c44" Sep 30 10:19:20 crc kubenswrapper[4730]: E0930 10:19:20.383034 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 10:19:22 crc kubenswrapper[4730]: I0930 10:19:22.774830 4730 scope.go:117] "RemoveContainer" containerID="d3d23ce695353dea38c25b944bb46bfcc2b5a0c875b22aaf1c965c406e8f8720" Sep 30 10:19:22 crc kubenswrapper[4730]: I0930 10:19:22.811925 4730 scope.go:117] "RemoveContainer" containerID="fac94bc3f86d8140ae61d87a6b36ac3c2a1b3b9138f78ddf9ea350a486d9b1bc" Sep 30 10:19:22 crc kubenswrapper[4730]: I0930 10:19:22.851049 4730 scope.go:117] "RemoveContainer" containerID="8a3ffd5de26ac98419697a858586a7f8735b97fb435bc42d6480d1227d737349" Sep 30 10:19:22 crc kubenswrapper[4730]: I0930 10:19:22.891952 
4730 scope.go:117] "RemoveContainer" containerID="514ef0c0b6f7f016c7be6ed49a18e331ed0e8c99322854a31d779f4ece6ba4f5" Sep 30 10:19:22 crc kubenswrapper[4730]: I0930 10:19:22.952139 4730 scope.go:117] "RemoveContainer" containerID="4ec9ea9ccb81335a48818cd75925bbf3eff5c762c81e52bd38adfb2860ee7f80" Sep 30 10:19:22 crc kubenswrapper[4730]: I0930 10:19:22.971730 4730 scope.go:117] "RemoveContainer" containerID="77ac1d3684b99ad78e5a7335a7932053b3d8c3347dc6bc7967706174f0476885" Sep 30 10:19:23 crc kubenswrapper[4730]: I0930 10:19:23.015923 4730 scope.go:117] "RemoveContainer" containerID="a1bbd2e814128f14d567f74f472517b4f72376037b6c592ab724d24024031e71" Sep 30 10:19:31 crc kubenswrapper[4730]: I0930 10:19:31.104549 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-f7qp5"] Sep 30 10:19:31 crc kubenswrapper[4730]: I0930 10:19:31.118104 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-f7qp5"] Sep 30 10:19:32 crc kubenswrapper[4730]: I0930 10:19:32.380986 4730 scope.go:117] "RemoveContainer" containerID="62dd4ca15c6a394c053301d28de02310227d72673a1c1548f40c218bf8c90c44" Sep 30 10:19:32 crc kubenswrapper[4730]: E0930 10:19:32.381513 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 10:19:32 crc kubenswrapper[4730]: I0930 10:19:32.395035 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="baa411cd-87b0-4467-af6d-9a64df6f75b9" path="/var/lib/kubelet/pods/baa411cd-87b0-4467-af6d-9a64df6f75b9/volumes" Sep 30 10:19:46 crc kubenswrapper[4730]: I0930 10:19:46.386813 4730 scope.go:117] "RemoveContainer" containerID="62dd4ca15c6a394c053301d28de02310227d72673a1c1548f40c218bf8c90c44" Sep 30 10:19:47 crc kubenswrapper[4730]: I0930 10:19:47.495238 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" event={"ID":"95bd4436-8399-478d-9552-c9ba5ae8f327","Type":"ContainerStarted","Data":"eaa28c0322c72a3838f60d339bc131399ae00513f606d27c5a12509ba2142b15"} Sep 30 10:19:51 crc kubenswrapper[4730]: I0930 10:19:51.054903 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-cell-mapping-6psh5"] Sep 30 10:19:51 crc kubenswrapper[4730]: I0930 10:19:51.063788 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-cell-mapping-6psh5"] Sep 30 10:19:52 crc kubenswrapper[4730]: I0930 10:19:52.395104 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="075c7eb9-74a6-49ca-a4b1-3fccba0ec354" path="/var/lib/kubelet/pods/075c7eb9-74a6-49ca-a4b1-3fccba0ec354/volumes" Sep 30 10:19:57 crc kubenswrapper[4730]: I0930 10:19:57.079014 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-hjjbx"] Sep 30 10:19:57 crc kubenswrapper[4730]: I0930 10:19:57.088140 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-hjjbx"] Sep 30 10:19:58 crc kubenswrapper[4730]: I0930 10:19:58.393838 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="043c0fe0-8c26-466a-bdde-0c6b9917e73f" 
path="/var/lib/kubelet/pods/043c0fe0-8c26-466a-bdde-0c6b9917e73f/volumes" Sep 30 10:20:23 crc kubenswrapper[4730]: I0930 10:20:23.146507 4730 scope.go:117] "RemoveContainer" containerID="0d11052bbd9c6a833feb43a9effc061106550825f8fcae90ac58eec1e239bc02" Sep 30 10:20:23 crc kubenswrapper[4730]: I0930 10:20:23.191822 4730 scope.go:117] "RemoveContainer" containerID="70a5681709d25a447d57a454fe9ae0deb302be92897371ffa057cbc9c6e37114" Sep 30 10:20:23 crc kubenswrapper[4730]: I0930 10:20:23.254795 4730 scope.go:117] "RemoveContainer" containerID="6bfbce02fb7811a3d6c7e52798c9abf02fea72ab6852cd388f91a6b2132ace1d" Sep 30 10:20:35 crc kubenswrapper[4730]: I0930 10:20:35.046785 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-cell-mapping-b5t67"] Sep 30 10:20:35 crc kubenswrapper[4730]: I0930 10:20:35.054817 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-cell-mapping-b5t67"] Sep 30 10:20:36 crc kubenswrapper[4730]: I0930 10:20:36.392131 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e721808e-e96b-4395-8acc-cf3416d9a100" path="/var/lib/kubelet/pods/e721808e-e96b-4395-8acc-cf3416d9a100/volumes" Sep 30 10:21:23 crc kubenswrapper[4730]: I0930 10:21:23.371013 4730 scope.go:117] "RemoveContainer" containerID="8e1eef1328d14555fb5c9534a3758cee5a955dff1036a2afd29183885d5b8efb" Sep 30 10:22:02 crc kubenswrapper[4730]: I0930 10:22:02.336547 4730 patch_prober.go:28] interesting pod/machine-config-daemon-d4zf9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 10:22:02 crc kubenswrapper[4730]: I0930 10:22:02.337132 4730 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 10:22:32 crc kubenswrapper[4730]: I0930 10:22:32.336733 4730 patch_prober.go:28] interesting pod/machine-config-daemon-d4zf9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 10:22:32 crc kubenswrapper[4730]: I0930 10:22:32.337342 4730 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 10:23:02 crc kubenswrapper[4730]: I0930 10:23:02.336593 4730 patch_prober.go:28] interesting pod/machine-config-daemon-d4zf9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 10:23:02 crc kubenswrapper[4730]: I0930 10:23:02.337201 4730 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerName="machine-config-daemon" probeResult="failure" output="Get 
\"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 10:23:02 crc kubenswrapper[4730]: I0930 10:23:02.337254 4730 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" Sep 30 10:23:02 crc kubenswrapper[4730]: I0930 10:23:02.338165 4730 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"eaa28c0322c72a3838f60d339bc131399ae00513f606d27c5a12509ba2142b15"} pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 10:23:02 crc kubenswrapper[4730]: I0930 10:23:02.338224 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerName="machine-config-daemon" containerID="cri-o://eaa28c0322c72a3838f60d339bc131399ae00513f606d27c5a12509ba2142b15" gracePeriod=600 Sep 30 10:23:03 crc kubenswrapper[4730]: I0930 10:23:03.273728 4730 generic.go:334] "Generic (PLEG): container finished" podID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerID="eaa28c0322c72a3838f60d339bc131399ae00513f606d27c5a12509ba2142b15" exitCode=0 Sep 30 10:23:03 crc kubenswrapper[4730]: I0930 10:23:03.273803 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" event={"ID":"95bd4436-8399-478d-9552-c9ba5ae8f327","Type":"ContainerDied","Data":"eaa28c0322c72a3838f60d339bc131399ae00513f606d27c5a12509ba2142b15"} Sep 30 10:23:03 crc kubenswrapper[4730]: I0930 10:23:03.274342 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" event={"ID":"95bd4436-8399-478d-9552-c9ba5ae8f327","Type":"ContainerStarted","Data":"9f6faea1f020203ed696b478f58ffd89607071ce35adf31ee047b8ef2439d347"} Sep 30 10:23:03 crc kubenswrapper[4730]: I0930 10:23:03.274364 4730 scope.go:117] "RemoveContainer" containerID="62dd4ca15c6a394c053301d28de02310227d72673a1c1548f40c218bf8c90c44" Sep 30 10:24:09 crc kubenswrapper[4730]: I0930 10:24:09.442889 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-4mr6r"] Sep 30 10:24:09 crc kubenswrapper[4730]: E0930 10:24:09.443812 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f222edb5-8cc1-4686-aed3-c4deba66caee" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Sep 30 10:24:09 crc kubenswrapper[4730]: I0930 10:24:09.443826 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="f222edb5-8cc1-4686-aed3-c4deba66caee" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Sep 30 10:24:09 crc kubenswrapper[4730]: I0930 10:24:09.444457 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="f222edb5-8cc1-4686-aed3-c4deba66caee" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Sep 30 10:24:09 crc kubenswrapper[4730]: I0930 10:24:09.445913 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-4mr6r" Sep 30 10:24:09 crc kubenswrapper[4730]: I0930 10:24:09.452467 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-4mr6r"] Sep 30 10:24:09 crc kubenswrapper[4730]: I0930 10:24:09.592009 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/58ded3e5-6216-4d2c-8312-a8f1d50ad082-utilities\") pod \"community-operators-4mr6r\" (UID: \"58ded3e5-6216-4d2c-8312-a8f1d50ad082\") " pod="openshift-marketplace/community-operators-4mr6r" Sep 30 10:24:09 crc kubenswrapper[4730]: I0930 10:24:09.592252 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v44hm\" (UniqueName: \"kubernetes.io/projected/58ded3e5-6216-4d2c-8312-a8f1d50ad082-kube-api-access-v44hm\") pod \"community-operators-4mr6r\" (UID: \"58ded3e5-6216-4d2c-8312-a8f1d50ad082\") " pod="openshift-marketplace/community-operators-4mr6r" Sep 30 10:24:09 crc kubenswrapper[4730]: I0930 10:24:09.592479 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/58ded3e5-6216-4d2c-8312-a8f1d50ad082-catalog-content\") pod \"community-operators-4mr6r\" (UID: \"58ded3e5-6216-4d2c-8312-a8f1d50ad082\") " pod="openshift-marketplace/community-operators-4mr6r" Sep 30 10:24:09 crc kubenswrapper[4730]: I0930 10:24:09.694596 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v44hm\" (UniqueName: \"kubernetes.io/projected/58ded3e5-6216-4d2c-8312-a8f1d50ad082-kube-api-access-v44hm\") pod \"community-operators-4mr6r\" (UID: \"58ded3e5-6216-4d2c-8312-a8f1d50ad082\") " pod="openshift-marketplace/community-operators-4mr6r" Sep 30 10:24:09 crc kubenswrapper[4730]: I0930 10:24:09.695033 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/58ded3e5-6216-4d2c-8312-a8f1d50ad082-catalog-content\") pod \"community-operators-4mr6r\" (UID: \"58ded3e5-6216-4d2c-8312-a8f1d50ad082\") " pod="openshift-marketplace/community-operators-4mr6r" Sep 30 10:24:09 crc kubenswrapper[4730]: I0930 10:24:09.695225 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/58ded3e5-6216-4d2c-8312-a8f1d50ad082-utilities\") pod \"community-operators-4mr6r\" (UID: \"58ded3e5-6216-4d2c-8312-a8f1d50ad082\") " pod="openshift-marketplace/community-operators-4mr6r" Sep 30 10:24:09 crc kubenswrapper[4730]: I0930 10:24:09.695696 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/58ded3e5-6216-4d2c-8312-a8f1d50ad082-catalog-content\") pod \"community-operators-4mr6r\" (UID: \"58ded3e5-6216-4d2c-8312-a8f1d50ad082\") " pod="openshift-marketplace/community-operators-4mr6r" Sep 30 10:24:09 crc kubenswrapper[4730]: I0930 10:24:09.695741 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/58ded3e5-6216-4d2c-8312-a8f1d50ad082-utilities\") pod \"community-operators-4mr6r\" (UID: \"58ded3e5-6216-4d2c-8312-a8f1d50ad082\") " pod="openshift-marketplace/community-operators-4mr6r" Sep 30 10:24:09 crc kubenswrapper[4730]: I0930 10:24:09.719292 4730 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-v44hm\" (UniqueName: \"kubernetes.io/projected/58ded3e5-6216-4d2c-8312-a8f1d50ad082-kube-api-access-v44hm\") pod \"community-operators-4mr6r\" (UID: \"58ded3e5-6216-4d2c-8312-a8f1d50ad082\") " pod="openshift-marketplace/community-operators-4mr6r" Sep 30 10:24:09 crc kubenswrapper[4730]: I0930 10:24:09.814475 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-4mr6r" Sep 30 10:24:10 crc kubenswrapper[4730]: I0930 10:24:10.321673 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-4mr6r"] Sep 30 10:24:10 crc kubenswrapper[4730]: I0930 10:24:10.913315 4730 generic.go:334] "Generic (PLEG): container finished" podID="58ded3e5-6216-4d2c-8312-a8f1d50ad082" containerID="75255a790ee5dd470f87e0c29484503ff13d7ea7b72137de96f6328977b4a69f" exitCode=0 Sep 30 10:24:10 crc kubenswrapper[4730]: I0930 10:24:10.913368 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4mr6r" event={"ID":"58ded3e5-6216-4d2c-8312-a8f1d50ad082","Type":"ContainerDied","Data":"75255a790ee5dd470f87e0c29484503ff13d7ea7b72137de96f6328977b4a69f"} Sep 30 10:24:10 crc kubenswrapper[4730]: I0930 10:24:10.913405 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4mr6r" event={"ID":"58ded3e5-6216-4d2c-8312-a8f1d50ad082","Type":"ContainerStarted","Data":"6c34e3ffe50e5effbc263091b2e29ff3942ec6194386d015214c0e85aac18ced"} Sep 30 10:24:10 crc kubenswrapper[4730]: I0930 10:24:10.915443 4730 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 30 10:24:13 crc kubenswrapper[4730]: I0930 10:24:13.946476 4730 generic.go:334] "Generic (PLEG): container finished" podID="58ded3e5-6216-4d2c-8312-a8f1d50ad082" containerID="d44f2a6a3e2b29a9ad3d4610916dd630b637b6f80f8d2fa09aba4d3216e04596" exitCode=0 Sep 30 10:24:13 crc kubenswrapper[4730]: I0930 10:24:13.946580 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4mr6r" event={"ID":"58ded3e5-6216-4d2c-8312-a8f1d50ad082","Type":"ContainerDied","Data":"d44f2a6a3e2b29a9ad3d4610916dd630b637b6f80f8d2fa09aba4d3216e04596"} Sep 30 10:24:15 crc kubenswrapper[4730]: I0930 10:24:15.965594 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4mr6r" event={"ID":"58ded3e5-6216-4d2c-8312-a8f1d50ad082","Type":"ContainerStarted","Data":"845c9ae34026f6ccc2980d4be42d7c15b4696789cf051ed4102c2d9ce2eb5fe3"} Sep 30 10:24:15 crc kubenswrapper[4730]: I0930 10:24:15.990003 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-4mr6r" podStartSLOduration=2.5932668899999998 podStartE2EDuration="6.989975405s" podCreationTimestamp="2025-09-30 10:24:09 +0000 UTC" firstStartedPulling="2025-09-30 10:24:10.915092436 +0000 UTC m=+2095.248352459" lastFinishedPulling="2025-09-30 10:24:15.311800971 +0000 UTC m=+2099.645060974" observedRunningTime="2025-09-30 10:24:15.982683325 +0000 UTC m=+2100.315943338" watchObservedRunningTime="2025-09-30 10:24:15.989975405 +0000 UTC m=+2100.323235418" Sep 30 10:24:19 crc kubenswrapper[4730]: I0930 10:24:19.814903 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-4mr6r" Sep 30 10:24:19 crc kubenswrapper[4730]: I0930 10:24:19.815859 4730 kubelet.go:2542] 
"SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-4mr6r" Sep 30 10:24:19 crc kubenswrapper[4730]: I0930 10:24:19.870938 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-4mr6r" Sep 30 10:24:29 crc kubenswrapper[4730]: I0930 10:24:29.868398 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-4mr6r" Sep 30 10:24:29 crc kubenswrapper[4730]: I0930 10:24:29.924305 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-4mr6r"] Sep 30 10:24:30 crc kubenswrapper[4730]: I0930 10:24:30.113415 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-4mr6r" podUID="58ded3e5-6216-4d2c-8312-a8f1d50ad082" containerName="registry-server" containerID="cri-o://845c9ae34026f6ccc2980d4be42d7c15b4696789cf051ed4102c2d9ce2eb5fe3" gracePeriod=2 Sep 30 10:24:30 crc kubenswrapper[4730]: I0930 10:24:30.205476 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-8btdm"] Sep 30 10:24:30 crc kubenswrapper[4730]: I0930 10:24:30.223241 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-8btdm"] Sep 30 10:24:30 crc kubenswrapper[4730]: I0930 10:24:30.232197 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-nqdpx"] Sep 30 10:24:30 crc kubenswrapper[4730]: I0930 10:24:30.241689 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-n58s8"] Sep 30 10:24:30 crc kubenswrapper[4730]: I0930 10:24:30.252774 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-nkxlk"] Sep 30 10:24:30 crc kubenswrapper[4730]: I0930 10:24:30.262521 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-9ws2x"] Sep 30 10:24:30 crc kubenswrapper[4730]: I0930 10:24:30.272782 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-25nqr"] Sep 30 10:24:30 crc kubenswrapper[4730]: I0930 10:24:30.283456 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-p2hb9"] Sep 30 10:24:30 crc kubenswrapper[4730]: I0930 10:24:30.290810 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-cr67n"] Sep 30 10:24:30 crc kubenswrapper[4730]: I0930 10:24:30.302869 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-bjwh2"] Sep 30 10:24:30 crc kubenswrapper[4730]: I0930 10:24:30.315216 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-pt2wc"] Sep 30 10:24:30 crc kubenswrapper[4730]: I0930 10:24:30.327453 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-n58s8"] Sep 30 10:24:30 crc kubenswrapper[4730]: I0930 10:24:30.335982 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-9ws2x"] Sep 30 10:24:30 crc kubenswrapper[4730]: I0930 10:24:30.341699 4730 kubelet.go:2431] "SyncLoop 
REMOVE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-25nqr"] Sep 30 10:24:30 crc kubenswrapper[4730]: I0930 10:24:30.348439 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-p2hb9"] Sep 30 10:24:30 crc kubenswrapper[4730]: I0930 10:24:30.355843 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-nkxlk"] Sep 30 10:24:30 crc kubenswrapper[4730]: I0930 10:24:30.365540 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-nqdpx"] Sep 30 10:24:30 crc kubenswrapper[4730]: I0930 10:24:30.395607 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="39a3cdfe-e568-41af-9a8b-88525dc448de" path="/var/lib/kubelet/pods/39a3cdfe-e568-41af-9a8b-88525dc448de/volumes" Sep 30 10:24:30 crc kubenswrapper[4730]: I0930 10:24:30.397604 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3e2a7720-3a68-4362-bf36-309b615a90d8" path="/var/lib/kubelet/pods/3e2a7720-3a68-4362-bf36-309b615a90d8/volumes" Sep 30 10:24:30 crc kubenswrapper[4730]: I0930 10:24:30.399382 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="575ec0cb-9c35-4f41-939d-3d80070464f4" path="/var/lib/kubelet/pods/575ec0cb-9c35-4f41-939d-3d80070464f4/volumes" Sep 30 10:24:30 crc kubenswrapper[4730]: I0930 10:24:30.400073 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7378cfdc-fe7f-4956-a2eb-3e49898ed1ee" path="/var/lib/kubelet/pods/7378cfdc-fe7f-4956-a2eb-3e49898ed1ee/volumes" Sep 30 10:24:30 crc kubenswrapper[4730]: I0930 10:24:30.401298 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="76058092-f481-4f4d-bf88-18610aadb37c" path="/var/lib/kubelet/pods/76058092-f481-4f4d-bf88-18610aadb37c/volumes" Sep 30 10:24:30 crc kubenswrapper[4730]: I0930 10:24:30.402016 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="94e7b621-95ed-40de-bf21-f3398f10bace" path="/var/lib/kubelet/pods/94e7b621-95ed-40de-bf21-f3398f10bace/volumes" Sep 30 10:24:30 crc kubenswrapper[4730]: I0930 10:24:30.402718 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f222edb5-8cc1-4686-aed3-c4deba66caee" path="/var/lib/kubelet/pods/f222edb5-8cc1-4686-aed3-c4deba66caee/volumes" Sep 30 10:24:30 crc kubenswrapper[4730]: I0930 10:24:30.403376 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-cr67n"] Sep 30 10:24:30 crc kubenswrapper[4730]: I0930 10:24:30.403416 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-pt2wc"] Sep 30 10:24:30 crc kubenswrapper[4730]: I0930 10:24:30.403432 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-bjwh2"] Sep 30 10:24:30 crc kubenswrapper[4730]: I0930 10:24:30.580738 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-4mr6r" Sep 30 10:24:30 crc kubenswrapper[4730]: I0930 10:24:30.714602 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/58ded3e5-6216-4d2c-8312-a8f1d50ad082-utilities\") pod \"58ded3e5-6216-4d2c-8312-a8f1d50ad082\" (UID: \"58ded3e5-6216-4d2c-8312-a8f1d50ad082\") " Sep 30 10:24:30 crc kubenswrapper[4730]: I0930 10:24:30.714833 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/58ded3e5-6216-4d2c-8312-a8f1d50ad082-catalog-content\") pod \"58ded3e5-6216-4d2c-8312-a8f1d50ad082\" (UID: \"58ded3e5-6216-4d2c-8312-a8f1d50ad082\") " Sep 30 10:24:30 crc kubenswrapper[4730]: I0930 10:24:30.714875 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v44hm\" (UniqueName: \"kubernetes.io/projected/58ded3e5-6216-4d2c-8312-a8f1d50ad082-kube-api-access-v44hm\") pod \"58ded3e5-6216-4d2c-8312-a8f1d50ad082\" (UID: \"58ded3e5-6216-4d2c-8312-a8f1d50ad082\") " Sep 30 10:24:30 crc kubenswrapper[4730]: I0930 10:24:30.716285 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/58ded3e5-6216-4d2c-8312-a8f1d50ad082-utilities" (OuterVolumeSpecName: "utilities") pod "58ded3e5-6216-4d2c-8312-a8f1d50ad082" (UID: "58ded3e5-6216-4d2c-8312-a8f1d50ad082"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 10:24:30 crc kubenswrapper[4730]: I0930 10:24:30.725764 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/58ded3e5-6216-4d2c-8312-a8f1d50ad082-kube-api-access-v44hm" (OuterVolumeSpecName: "kube-api-access-v44hm") pod "58ded3e5-6216-4d2c-8312-a8f1d50ad082" (UID: "58ded3e5-6216-4d2c-8312-a8f1d50ad082"). InnerVolumeSpecName "kube-api-access-v44hm". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:24:30 crc kubenswrapper[4730]: I0930 10:24:30.770525 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/58ded3e5-6216-4d2c-8312-a8f1d50ad082-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "58ded3e5-6216-4d2c-8312-a8f1d50ad082" (UID: "58ded3e5-6216-4d2c-8312-a8f1d50ad082"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 10:24:30 crc kubenswrapper[4730]: I0930 10:24:30.817824 4730 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/58ded3e5-6216-4d2c-8312-a8f1d50ad082-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 10:24:30 crc kubenswrapper[4730]: I0930 10:24:30.817874 4730 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/58ded3e5-6216-4d2c-8312-a8f1d50ad082-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 10:24:30 crc kubenswrapper[4730]: I0930 10:24:30.817890 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v44hm\" (UniqueName: \"kubernetes.io/projected/58ded3e5-6216-4d2c-8312-a8f1d50ad082-kube-api-access-v44hm\") on node \"crc\" DevicePath \"\"" Sep 30 10:24:31 crc kubenswrapper[4730]: I0930 10:24:31.122787 4730 generic.go:334] "Generic (PLEG): container finished" podID="58ded3e5-6216-4d2c-8312-a8f1d50ad082" containerID="845c9ae34026f6ccc2980d4be42d7c15b4696789cf051ed4102c2d9ce2eb5fe3" exitCode=0 Sep 30 10:24:31 crc kubenswrapper[4730]: I0930 10:24:31.123904 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-4mr6r" Sep 30 10:24:31 crc kubenswrapper[4730]: I0930 10:24:31.124192 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4mr6r" event={"ID":"58ded3e5-6216-4d2c-8312-a8f1d50ad082","Type":"ContainerDied","Data":"845c9ae34026f6ccc2980d4be42d7c15b4696789cf051ed4102c2d9ce2eb5fe3"} Sep 30 10:24:31 crc kubenswrapper[4730]: I0930 10:24:31.124231 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4mr6r" event={"ID":"58ded3e5-6216-4d2c-8312-a8f1d50ad082","Type":"ContainerDied","Data":"6c34e3ffe50e5effbc263091b2e29ff3942ec6194386d015214c0e85aac18ced"} Sep 30 10:24:31 crc kubenswrapper[4730]: I0930 10:24:31.124252 4730 scope.go:117] "RemoveContainer" containerID="845c9ae34026f6ccc2980d4be42d7c15b4696789cf051ed4102c2d9ce2eb5fe3" Sep 30 10:24:31 crc kubenswrapper[4730]: I0930 10:24:31.178295 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-4mr6r"] Sep 30 10:24:31 crc kubenswrapper[4730]: I0930 10:24:31.189877 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-4mr6r"] Sep 30 10:24:31 crc kubenswrapper[4730]: I0930 10:24:31.190894 4730 scope.go:117] "RemoveContainer" containerID="d44f2a6a3e2b29a9ad3d4610916dd630b637b6f80f8d2fa09aba4d3216e04596" Sep 30 10:24:31 crc kubenswrapper[4730]: I0930 10:24:31.236965 4730 scope.go:117] "RemoveContainer" containerID="75255a790ee5dd470f87e0c29484503ff13d7ea7b72137de96f6328977b4a69f" Sep 30 10:24:31 crc kubenswrapper[4730]: I0930 10:24:31.273404 4730 scope.go:117] "RemoveContainer" containerID="845c9ae34026f6ccc2980d4be42d7c15b4696789cf051ed4102c2d9ce2eb5fe3" Sep 30 10:24:31 crc kubenswrapper[4730]: E0930 10:24:31.275451 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"845c9ae34026f6ccc2980d4be42d7c15b4696789cf051ed4102c2d9ce2eb5fe3\": container with ID starting with 845c9ae34026f6ccc2980d4be42d7c15b4696789cf051ed4102c2d9ce2eb5fe3 not found: ID does not exist" containerID="845c9ae34026f6ccc2980d4be42d7c15b4696789cf051ed4102c2d9ce2eb5fe3" Sep 30 10:24:31 crc kubenswrapper[4730]: I0930 10:24:31.275493 
4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"845c9ae34026f6ccc2980d4be42d7c15b4696789cf051ed4102c2d9ce2eb5fe3"} err="failed to get container status \"845c9ae34026f6ccc2980d4be42d7c15b4696789cf051ed4102c2d9ce2eb5fe3\": rpc error: code = NotFound desc = could not find container \"845c9ae34026f6ccc2980d4be42d7c15b4696789cf051ed4102c2d9ce2eb5fe3\": container with ID starting with 845c9ae34026f6ccc2980d4be42d7c15b4696789cf051ed4102c2d9ce2eb5fe3 not found: ID does not exist" Sep 30 10:24:31 crc kubenswrapper[4730]: I0930 10:24:31.275538 4730 scope.go:117] "RemoveContainer" containerID="d44f2a6a3e2b29a9ad3d4610916dd630b637b6f80f8d2fa09aba4d3216e04596" Sep 30 10:24:31 crc kubenswrapper[4730]: E0930 10:24:31.275954 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d44f2a6a3e2b29a9ad3d4610916dd630b637b6f80f8d2fa09aba4d3216e04596\": container with ID starting with d44f2a6a3e2b29a9ad3d4610916dd630b637b6f80f8d2fa09aba4d3216e04596 not found: ID does not exist" containerID="d44f2a6a3e2b29a9ad3d4610916dd630b637b6f80f8d2fa09aba4d3216e04596" Sep 30 10:24:31 crc kubenswrapper[4730]: I0930 10:24:31.275979 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d44f2a6a3e2b29a9ad3d4610916dd630b637b6f80f8d2fa09aba4d3216e04596"} err="failed to get container status \"d44f2a6a3e2b29a9ad3d4610916dd630b637b6f80f8d2fa09aba4d3216e04596\": rpc error: code = NotFound desc = could not find container \"d44f2a6a3e2b29a9ad3d4610916dd630b637b6f80f8d2fa09aba4d3216e04596\": container with ID starting with d44f2a6a3e2b29a9ad3d4610916dd630b637b6f80f8d2fa09aba4d3216e04596 not found: ID does not exist" Sep 30 10:24:31 crc kubenswrapper[4730]: I0930 10:24:31.275996 4730 scope.go:117] "RemoveContainer" containerID="75255a790ee5dd470f87e0c29484503ff13d7ea7b72137de96f6328977b4a69f" Sep 30 10:24:31 crc kubenswrapper[4730]: E0930 10:24:31.276363 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"75255a790ee5dd470f87e0c29484503ff13d7ea7b72137de96f6328977b4a69f\": container with ID starting with 75255a790ee5dd470f87e0c29484503ff13d7ea7b72137de96f6328977b4a69f not found: ID does not exist" containerID="75255a790ee5dd470f87e0c29484503ff13d7ea7b72137de96f6328977b4a69f" Sep 30 10:24:31 crc kubenswrapper[4730]: I0930 10:24:31.276392 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"75255a790ee5dd470f87e0c29484503ff13d7ea7b72137de96f6328977b4a69f"} err="failed to get container status \"75255a790ee5dd470f87e0c29484503ff13d7ea7b72137de96f6328977b4a69f\": rpc error: code = NotFound desc = could not find container \"75255a790ee5dd470f87e0c29484503ff13d7ea7b72137de96f6328977b4a69f\": container with ID starting with 75255a790ee5dd470f87e0c29484503ff13d7ea7b72137de96f6328977b4a69f not found: ID does not exist" Sep 30 10:24:32 crc kubenswrapper[4730]: I0930 10:24:32.394721 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="27e64b2d-13ec-4303-9468-86b81dd4a2d0" path="/var/lib/kubelet/pods/27e64b2d-13ec-4303-9468-86b81dd4a2d0/volumes" Sep 30 10:24:32 crc kubenswrapper[4730]: I0930 10:24:32.395418 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="573673c2-6d89-478c-bcae-c6a1b77e0c94" path="/var/lib/kubelet/pods/573673c2-6d89-478c-bcae-c6a1b77e0c94/volumes" Sep 30 10:24:32 crc kubenswrapper[4730]: I0930 
10:24:32.395949 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="58ded3e5-6216-4d2c-8312-a8f1d50ad082" path="/var/lib/kubelet/pods/58ded3e5-6216-4d2c-8312-a8f1d50ad082/volumes" Sep 30 10:24:32 crc kubenswrapper[4730]: I0930 10:24:32.397112 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="937c0dd4-da7e-4fb3-9588-a13534fc6821" path="/var/lib/kubelet/pods/937c0dd4-da7e-4fb3-9588-a13534fc6821/volumes" Sep 30 10:24:42 crc kubenswrapper[4730]: I0930 10:24:42.409471 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wqj6f"] Sep 30 10:24:42 crc kubenswrapper[4730]: E0930 10:24:42.410647 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="58ded3e5-6216-4d2c-8312-a8f1d50ad082" containerName="extract-content" Sep 30 10:24:42 crc kubenswrapper[4730]: I0930 10:24:42.410666 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="58ded3e5-6216-4d2c-8312-a8f1d50ad082" containerName="extract-content" Sep 30 10:24:42 crc kubenswrapper[4730]: E0930 10:24:42.410705 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="58ded3e5-6216-4d2c-8312-a8f1d50ad082" containerName="registry-server" Sep 30 10:24:42 crc kubenswrapper[4730]: I0930 10:24:42.410715 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="58ded3e5-6216-4d2c-8312-a8f1d50ad082" containerName="registry-server" Sep 30 10:24:42 crc kubenswrapper[4730]: E0930 10:24:42.410737 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="58ded3e5-6216-4d2c-8312-a8f1d50ad082" containerName="extract-utilities" Sep 30 10:24:42 crc kubenswrapper[4730]: I0930 10:24:42.410745 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="58ded3e5-6216-4d2c-8312-a8f1d50ad082" containerName="extract-utilities" Sep 30 10:24:42 crc kubenswrapper[4730]: I0930 10:24:42.410932 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="58ded3e5-6216-4d2c-8312-a8f1d50ad082" containerName="registry-server" Sep 30 10:24:42 crc kubenswrapper[4730]: I0930 10:24:42.411633 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wqj6f" Sep 30 10:24:42 crc kubenswrapper[4730]: I0930 10:24:42.416561 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 10:24:42 crc kubenswrapper[4730]: I0930 10:24:42.417039 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-8vdlt" Sep 30 10:24:42 crc kubenswrapper[4730]: I0930 10:24:42.417219 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Sep 30 10:24:42 crc kubenswrapper[4730]: I0930 10:24:42.417387 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 30 10:24:42 crc kubenswrapper[4730]: I0930 10:24:42.417506 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 30 10:24:42 crc kubenswrapper[4730]: I0930 10:24:42.419712 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wqj6f"] Sep 30 10:24:42 crc kubenswrapper[4730]: I0930 10:24:42.548365 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9c876aea-c4ac-4055-953d-9bedb3615be5-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-wqj6f\" (UID: \"9c876aea-c4ac-4055-953d-9bedb3615be5\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wqj6f" Sep 30 10:24:42 crc kubenswrapper[4730]: I0930 10:24:42.548441 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/9c876aea-c4ac-4055-953d-9bedb3615be5-ceph\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-wqj6f\" (UID: \"9c876aea-c4ac-4055-953d-9bedb3615be5\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wqj6f" Sep 30 10:24:42 crc kubenswrapper[4730]: I0930 10:24:42.548512 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9c876aea-c4ac-4055-953d-9bedb3615be5-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-wqj6f\" (UID: \"9c876aea-c4ac-4055-953d-9bedb3615be5\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wqj6f" Sep 30 10:24:42 crc kubenswrapper[4730]: I0930 10:24:42.548570 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j7qqn\" (UniqueName: \"kubernetes.io/projected/9c876aea-c4ac-4055-953d-9bedb3615be5-kube-api-access-j7qqn\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-wqj6f\" (UID: \"9c876aea-c4ac-4055-953d-9bedb3615be5\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wqj6f" Sep 30 10:24:42 crc kubenswrapper[4730]: I0930 10:24:42.548625 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9c876aea-c4ac-4055-953d-9bedb3615be5-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-wqj6f\" (UID: \"9c876aea-c4ac-4055-953d-9bedb3615be5\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wqj6f" Sep 30 10:24:42 crc kubenswrapper[4730]: I0930 10:24:42.650593 4730 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9c876aea-c4ac-4055-953d-9bedb3615be5-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-wqj6f\" (UID: \"9c876aea-c4ac-4055-953d-9bedb3615be5\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wqj6f" Sep 30 10:24:42 crc kubenswrapper[4730]: I0930 10:24:42.651094 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9c876aea-c4ac-4055-953d-9bedb3615be5-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-wqj6f\" (UID: \"9c876aea-c4ac-4055-953d-9bedb3615be5\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wqj6f" Sep 30 10:24:42 crc kubenswrapper[4730]: I0930 10:24:42.651152 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/9c876aea-c4ac-4055-953d-9bedb3615be5-ceph\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-wqj6f\" (UID: \"9c876aea-c4ac-4055-953d-9bedb3615be5\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wqj6f" Sep 30 10:24:42 crc kubenswrapper[4730]: I0930 10:24:42.651207 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9c876aea-c4ac-4055-953d-9bedb3615be5-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-wqj6f\" (UID: \"9c876aea-c4ac-4055-953d-9bedb3615be5\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wqj6f" Sep 30 10:24:42 crc kubenswrapper[4730]: I0930 10:24:42.651282 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j7qqn\" (UniqueName: \"kubernetes.io/projected/9c876aea-c4ac-4055-953d-9bedb3615be5-kube-api-access-j7qqn\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-wqj6f\" (UID: \"9c876aea-c4ac-4055-953d-9bedb3615be5\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wqj6f" Sep 30 10:24:42 crc kubenswrapper[4730]: I0930 10:24:42.659113 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9c876aea-c4ac-4055-953d-9bedb3615be5-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-wqj6f\" (UID: \"9c876aea-c4ac-4055-953d-9bedb3615be5\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wqj6f" Sep 30 10:24:42 crc kubenswrapper[4730]: I0930 10:24:42.659157 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/9c876aea-c4ac-4055-953d-9bedb3615be5-ceph\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-wqj6f\" (UID: \"9c876aea-c4ac-4055-953d-9bedb3615be5\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wqj6f" Sep 30 10:24:42 crc kubenswrapper[4730]: I0930 10:24:42.659650 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9c876aea-c4ac-4055-953d-9bedb3615be5-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-wqj6f\" (UID: \"9c876aea-c4ac-4055-953d-9bedb3615be5\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wqj6f" Sep 30 10:24:42 crc kubenswrapper[4730]: I0930 10:24:42.660525 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/9c876aea-c4ac-4055-953d-9bedb3615be5-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-wqj6f\" (UID: \"9c876aea-c4ac-4055-953d-9bedb3615be5\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wqj6f" Sep 30 10:24:42 crc kubenswrapper[4730]: I0930 10:24:42.678350 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j7qqn\" (UniqueName: \"kubernetes.io/projected/9c876aea-c4ac-4055-953d-9bedb3615be5-kube-api-access-j7qqn\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-wqj6f\" (UID: \"9c876aea-c4ac-4055-953d-9bedb3615be5\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wqj6f" Sep 30 10:24:42 crc kubenswrapper[4730]: I0930 10:24:42.747113 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wqj6f" Sep 30 10:24:43 crc kubenswrapper[4730]: I0930 10:24:43.411626 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wqj6f"] Sep 30 10:24:44 crc kubenswrapper[4730]: I0930 10:24:44.236681 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wqj6f" event={"ID":"9c876aea-c4ac-4055-953d-9bedb3615be5","Type":"ContainerStarted","Data":"7b1a7370ed24ef385207b8d87b4adfbe219c94a946a9f188dd2fb6c07720112e"} Sep 30 10:24:45 crc kubenswrapper[4730]: I0930 10:24:45.248946 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wqj6f" event={"ID":"9c876aea-c4ac-4055-953d-9bedb3615be5","Type":"ContainerStarted","Data":"a106bcc656143a7c7794dbb4b54ef3e8961f7216bd1937b439b9541a6c4da5b1"} Sep 30 10:24:45 crc kubenswrapper[4730]: I0930 10:24:45.284479 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wqj6f" podStartSLOduration=2.716302971 podStartE2EDuration="3.284453374s" podCreationTimestamp="2025-09-30 10:24:42 +0000 UTC" firstStartedPulling="2025-09-30 10:24:43.414585813 +0000 UTC m=+2127.747845806" lastFinishedPulling="2025-09-30 10:24:43.982736216 +0000 UTC m=+2128.315996209" observedRunningTime="2025-09-30 10:24:45.27701887 +0000 UTC m=+2129.610278873" watchObservedRunningTime="2025-09-30 10:24:45.284453374 +0000 UTC m=+2129.617713367" Sep 30 10:24:57 crc kubenswrapper[4730]: E0930 10:24:57.214835 4730 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9c876aea_c4ac_4055_953d_9bedb3615be5.slice/crio-a106bcc656143a7c7794dbb4b54ef3e8961f7216bd1937b439b9541a6c4da5b1.scope\": RecentStats: unable to find data in memory cache]" Sep 30 10:24:57 crc kubenswrapper[4730]: I0930 10:24:57.350235 4730 generic.go:334] "Generic (PLEG): container finished" podID="9c876aea-c4ac-4055-953d-9bedb3615be5" containerID="a106bcc656143a7c7794dbb4b54ef3e8961f7216bd1937b439b9541a6c4da5b1" exitCode=0 Sep 30 10:24:57 crc kubenswrapper[4730]: I0930 10:24:57.350277 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wqj6f" event={"ID":"9c876aea-c4ac-4055-953d-9bedb3615be5","Type":"ContainerDied","Data":"a106bcc656143a7c7794dbb4b54ef3e8961f7216bd1937b439b9541a6c4da5b1"} Sep 30 10:24:58 crc kubenswrapper[4730]: I0930 10:24:58.744234 4730 util.go:48] "No ready sandbox for pod 
can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wqj6f" Sep 30 10:24:58 crc kubenswrapper[4730]: I0930 10:24:58.794467 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9c876aea-c4ac-4055-953d-9bedb3615be5-repo-setup-combined-ca-bundle\") pod \"9c876aea-c4ac-4055-953d-9bedb3615be5\" (UID: \"9c876aea-c4ac-4055-953d-9bedb3615be5\") " Sep 30 10:24:58 crc kubenswrapper[4730]: I0930 10:24:58.794872 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/9c876aea-c4ac-4055-953d-9bedb3615be5-ceph\") pod \"9c876aea-c4ac-4055-953d-9bedb3615be5\" (UID: \"9c876aea-c4ac-4055-953d-9bedb3615be5\") " Sep 30 10:24:58 crc kubenswrapper[4730]: I0930 10:24:58.794950 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9c876aea-c4ac-4055-953d-9bedb3615be5-ssh-key\") pod \"9c876aea-c4ac-4055-953d-9bedb3615be5\" (UID: \"9c876aea-c4ac-4055-953d-9bedb3615be5\") " Sep 30 10:24:58 crc kubenswrapper[4730]: I0930 10:24:58.795056 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9c876aea-c4ac-4055-953d-9bedb3615be5-inventory\") pod \"9c876aea-c4ac-4055-953d-9bedb3615be5\" (UID: \"9c876aea-c4ac-4055-953d-9bedb3615be5\") " Sep 30 10:24:58 crc kubenswrapper[4730]: I0930 10:24:58.795133 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j7qqn\" (UniqueName: \"kubernetes.io/projected/9c876aea-c4ac-4055-953d-9bedb3615be5-kube-api-access-j7qqn\") pod \"9c876aea-c4ac-4055-953d-9bedb3615be5\" (UID: \"9c876aea-c4ac-4055-953d-9bedb3615be5\") " Sep 30 10:24:58 crc kubenswrapper[4730]: I0930 10:24:58.801344 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9c876aea-c4ac-4055-953d-9bedb3615be5-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "9c876aea-c4ac-4055-953d-9bedb3615be5" (UID: "9c876aea-c4ac-4055-953d-9bedb3615be5"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:24:58 crc kubenswrapper[4730]: I0930 10:24:58.801467 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9c876aea-c4ac-4055-953d-9bedb3615be5-ceph" (OuterVolumeSpecName: "ceph") pod "9c876aea-c4ac-4055-953d-9bedb3615be5" (UID: "9c876aea-c4ac-4055-953d-9bedb3615be5"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:24:58 crc kubenswrapper[4730]: I0930 10:24:58.801819 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9c876aea-c4ac-4055-953d-9bedb3615be5-kube-api-access-j7qqn" (OuterVolumeSpecName: "kube-api-access-j7qqn") pod "9c876aea-c4ac-4055-953d-9bedb3615be5" (UID: "9c876aea-c4ac-4055-953d-9bedb3615be5"). InnerVolumeSpecName "kube-api-access-j7qqn". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:24:58 crc kubenswrapper[4730]: I0930 10:24:58.824296 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9c876aea-c4ac-4055-953d-9bedb3615be5-inventory" (OuterVolumeSpecName: "inventory") pod "9c876aea-c4ac-4055-953d-9bedb3615be5" (UID: "9c876aea-c4ac-4055-953d-9bedb3615be5"). 
InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:24:58 crc kubenswrapper[4730]: I0930 10:24:58.843360 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9c876aea-c4ac-4055-953d-9bedb3615be5-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "9c876aea-c4ac-4055-953d-9bedb3615be5" (UID: "9c876aea-c4ac-4055-953d-9bedb3615be5"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:24:58 crc kubenswrapper[4730]: I0930 10:24:58.897370 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j7qqn\" (UniqueName: \"kubernetes.io/projected/9c876aea-c4ac-4055-953d-9bedb3615be5-kube-api-access-j7qqn\") on node \"crc\" DevicePath \"\"" Sep 30 10:24:58 crc kubenswrapper[4730]: I0930 10:24:58.897421 4730 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9c876aea-c4ac-4055-953d-9bedb3615be5-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 10:24:58 crc kubenswrapper[4730]: I0930 10:24:58.897438 4730 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/9c876aea-c4ac-4055-953d-9bedb3615be5-ceph\") on node \"crc\" DevicePath \"\"" Sep 30 10:24:58 crc kubenswrapper[4730]: I0930 10:24:58.897450 4730 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9c876aea-c4ac-4055-953d-9bedb3615be5-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 10:24:58 crc kubenswrapper[4730]: I0930 10:24:58.897463 4730 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9c876aea-c4ac-4055-953d-9bedb3615be5-inventory\") on node \"crc\" DevicePath \"\"" Sep 30 10:24:59 crc kubenswrapper[4730]: I0930 10:24:59.373562 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wqj6f" event={"ID":"9c876aea-c4ac-4055-953d-9bedb3615be5","Type":"ContainerDied","Data":"7b1a7370ed24ef385207b8d87b4adfbe219c94a946a9f188dd2fb6c07720112e"} Sep 30 10:24:59 crc kubenswrapper[4730]: I0930 10:24:59.373681 4730 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7b1a7370ed24ef385207b8d87b4adfbe219c94a946a9f188dd2fb6c07720112e" Sep 30 10:24:59 crc kubenswrapper[4730]: I0930 10:24:59.373750 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-wqj6f" Sep 30 10:24:59 crc kubenswrapper[4730]: I0930 10:24:59.446441 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-2qpzs"] Sep 30 10:24:59 crc kubenswrapper[4730]: E0930 10:24:59.447027 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9c876aea-c4ac-4055-953d-9bedb3615be5" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Sep 30 10:24:59 crc kubenswrapper[4730]: I0930 10:24:59.447058 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="9c876aea-c4ac-4055-953d-9bedb3615be5" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Sep 30 10:24:59 crc kubenswrapper[4730]: I0930 10:24:59.447401 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="9c876aea-c4ac-4055-953d-9bedb3615be5" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Sep 30 10:24:59 crc kubenswrapper[4730]: I0930 10:24:59.448468 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-2qpzs" Sep 30 10:24:59 crc kubenswrapper[4730]: I0930 10:24:59.453122 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 10:24:59 crc kubenswrapper[4730]: I0930 10:24:59.453160 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-8vdlt" Sep 30 10:24:59 crc kubenswrapper[4730]: I0930 10:24:59.453185 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Sep 30 10:24:59 crc kubenswrapper[4730]: I0930 10:24:59.453340 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 30 10:24:59 crc kubenswrapper[4730]: I0930 10:24:59.453847 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 30 10:24:59 crc kubenswrapper[4730]: I0930 10:24:59.475306 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-2qpzs"] Sep 30 10:24:59 crc kubenswrapper[4730]: I0930 10:24:59.508278 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-65bxm\" (UniqueName: \"kubernetes.io/projected/96923f77-1ffc-4d73-adf3-33f66499e0f9-kube-api-access-65bxm\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-2qpzs\" (UID: \"96923f77-1ffc-4d73-adf3-33f66499e0f9\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-2qpzs" Sep 30 10:24:59 crc kubenswrapper[4730]: I0930 10:24:59.508343 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/96923f77-1ffc-4d73-adf3-33f66499e0f9-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-2qpzs\" (UID: \"96923f77-1ffc-4d73-adf3-33f66499e0f9\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-2qpzs" Sep 30 10:24:59 crc kubenswrapper[4730]: I0930 10:24:59.508377 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/96923f77-1ffc-4d73-adf3-33f66499e0f9-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-2qpzs\" (UID: 
\"96923f77-1ffc-4d73-adf3-33f66499e0f9\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-2qpzs" Sep 30 10:24:59 crc kubenswrapper[4730]: I0930 10:24:59.508428 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/96923f77-1ffc-4d73-adf3-33f66499e0f9-ceph\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-2qpzs\" (UID: \"96923f77-1ffc-4d73-adf3-33f66499e0f9\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-2qpzs" Sep 30 10:24:59 crc kubenswrapper[4730]: I0930 10:24:59.508461 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/96923f77-1ffc-4d73-adf3-33f66499e0f9-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-2qpzs\" (UID: \"96923f77-1ffc-4d73-adf3-33f66499e0f9\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-2qpzs" Sep 30 10:24:59 crc kubenswrapper[4730]: I0930 10:24:59.610412 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-65bxm\" (UniqueName: \"kubernetes.io/projected/96923f77-1ffc-4d73-adf3-33f66499e0f9-kube-api-access-65bxm\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-2qpzs\" (UID: \"96923f77-1ffc-4d73-adf3-33f66499e0f9\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-2qpzs" Sep 30 10:24:59 crc kubenswrapper[4730]: I0930 10:24:59.610476 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/96923f77-1ffc-4d73-adf3-33f66499e0f9-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-2qpzs\" (UID: \"96923f77-1ffc-4d73-adf3-33f66499e0f9\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-2qpzs" Sep 30 10:24:59 crc kubenswrapper[4730]: I0930 10:24:59.610510 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/96923f77-1ffc-4d73-adf3-33f66499e0f9-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-2qpzs\" (UID: \"96923f77-1ffc-4d73-adf3-33f66499e0f9\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-2qpzs" Sep 30 10:24:59 crc kubenswrapper[4730]: I0930 10:24:59.610555 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/96923f77-1ffc-4d73-adf3-33f66499e0f9-ceph\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-2qpzs\" (UID: \"96923f77-1ffc-4d73-adf3-33f66499e0f9\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-2qpzs" Sep 30 10:24:59 crc kubenswrapper[4730]: I0930 10:24:59.610584 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/96923f77-1ffc-4d73-adf3-33f66499e0f9-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-2qpzs\" (UID: \"96923f77-1ffc-4d73-adf3-33f66499e0f9\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-2qpzs" Sep 30 10:24:59 crc kubenswrapper[4730]: I0930 10:24:59.614864 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/96923f77-1ffc-4d73-adf3-33f66499e0f9-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-2qpzs\" (UID: \"96923f77-1ffc-4d73-adf3-33f66499e0f9\") " 
pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-2qpzs" Sep 30 10:24:59 crc kubenswrapper[4730]: I0930 10:24:59.615008 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/96923f77-1ffc-4d73-adf3-33f66499e0f9-ceph\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-2qpzs\" (UID: \"96923f77-1ffc-4d73-adf3-33f66499e0f9\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-2qpzs" Sep 30 10:24:59 crc kubenswrapper[4730]: I0930 10:24:59.615915 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/96923f77-1ffc-4d73-adf3-33f66499e0f9-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-2qpzs\" (UID: \"96923f77-1ffc-4d73-adf3-33f66499e0f9\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-2qpzs" Sep 30 10:24:59 crc kubenswrapper[4730]: I0930 10:24:59.616030 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/96923f77-1ffc-4d73-adf3-33f66499e0f9-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-2qpzs\" (UID: \"96923f77-1ffc-4d73-adf3-33f66499e0f9\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-2qpzs" Sep 30 10:24:59 crc kubenswrapper[4730]: I0930 10:24:59.625807 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-65bxm\" (UniqueName: \"kubernetes.io/projected/96923f77-1ffc-4d73-adf3-33f66499e0f9-kube-api-access-65bxm\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-2qpzs\" (UID: \"96923f77-1ffc-4d73-adf3-33f66499e0f9\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-2qpzs" Sep 30 10:24:59 crc kubenswrapper[4730]: I0930 10:24:59.773445 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-2qpzs" Sep 30 10:25:00 crc kubenswrapper[4730]: I0930 10:25:00.298947 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-2qpzs"] Sep 30 10:25:00 crc kubenswrapper[4730]: I0930 10:25:00.392797 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-2qpzs" event={"ID":"96923f77-1ffc-4d73-adf3-33f66499e0f9","Type":"ContainerStarted","Data":"c18c9ecc87820f6d73a01d5c31ee7e65b9bbb33f9611c2799040ad8fe18cda98"} Sep 30 10:25:01 crc kubenswrapper[4730]: I0930 10:25:01.804827 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-wc9nb"] Sep 30 10:25:01 crc kubenswrapper[4730]: I0930 10:25:01.807254 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wc9nb" Sep 30 10:25:01 crc kubenswrapper[4730]: I0930 10:25:01.818127 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-wc9nb"] Sep 30 10:25:01 crc kubenswrapper[4730]: I0930 10:25:01.854939 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cad3aa75-af34-4dc9-b6fd-9e675e4812f2-catalog-content\") pod \"redhat-marketplace-wc9nb\" (UID: \"cad3aa75-af34-4dc9-b6fd-9e675e4812f2\") " pod="openshift-marketplace/redhat-marketplace-wc9nb" Sep 30 10:25:01 crc kubenswrapper[4730]: I0930 10:25:01.855234 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cad3aa75-af34-4dc9-b6fd-9e675e4812f2-utilities\") pod \"redhat-marketplace-wc9nb\" (UID: \"cad3aa75-af34-4dc9-b6fd-9e675e4812f2\") " pod="openshift-marketplace/redhat-marketplace-wc9nb" Sep 30 10:25:01 crc kubenswrapper[4730]: I0930 10:25:01.855347 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m7ldv\" (UniqueName: \"kubernetes.io/projected/cad3aa75-af34-4dc9-b6fd-9e675e4812f2-kube-api-access-m7ldv\") pod \"redhat-marketplace-wc9nb\" (UID: \"cad3aa75-af34-4dc9-b6fd-9e675e4812f2\") " pod="openshift-marketplace/redhat-marketplace-wc9nb" Sep 30 10:25:01 crc kubenswrapper[4730]: I0930 10:25:01.956862 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cad3aa75-af34-4dc9-b6fd-9e675e4812f2-catalog-content\") pod \"redhat-marketplace-wc9nb\" (UID: \"cad3aa75-af34-4dc9-b6fd-9e675e4812f2\") " pod="openshift-marketplace/redhat-marketplace-wc9nb" Sep 30 10:25:01 crc kubenswrapper[4730]: I0930 10:25:01.956910 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cad3aa75-af34-4dc9-b6fd-9e675e4812f2-utilities\") pod \"redhat-marketplace-wc9nb\" (UID: \"cad3aa75-af34-4dc9-b6fd-9e675e4812f2\") " pod="openshift-marketplace/redhat-marketplace-wc9nb" Sep 30 10:25:01 crc kubenswrapper[4730]: I0930 10:25:01.956934 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m7ldv\" (UniqueName: \"kubernetes.io/projected/cad3aa75-af34-4dc9-b6fd-9e675e4812f2-kube-api-access-m7ldv\") pod \"redhat-marketplace-wc9nb\" (UID: \"cad3aa75-af34-4dc9-b6fd-9e675e4812f2\") " pod="openshift-marketplace/redhat-marketplace-wc9nb" Sep 30 10:25:01 crc kubenswrapper[4730]: I0930 10:25:01.957376 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cad3aa75-af34-4dc9-b6fd-9e675e4812f2-utilities\") pod \"redhat-marketplace-wc9nb\" (UID: \"cad3aa75-af34-4dc9-b6fd-9e675e4812f2\") " pod="openshift-marketplace/redhat-marketplace-wc9nb" Sep 30 10:25:01 crc kubenswrapper[4730]: I0930 10:25:01.957688 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cad3aa75-af34-4dc9-b6fd-9e675e4812f2-catalog-content\") pod \"redhat-marketplace-wc9nb\" (UID: \"cad3aa75-af34-4dc9-b6fd-9e675e4812f2\") " pod="openshift-marketplace/redhat-marketplace-wc9nb" Sep 30 10:25:01 crc kubenswrapper[4730]: I0930 10:25:01.987514 4730 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-m7ldv\" (UniqueName: \"kubernetes.io/projected/cad3aa75-af34-4dc9-b6fd-9e675e4812f2-kube-api-access-m7ldv\") pod \"redhat-marketplace-wc9nb\" (UID: \"cad3aa75-af34-4dc9-b6fd-9e675e4812f2\") " pod="openshift-marketplace/redhat-marketplace-wc9nb" Sep 30 10:25:02 crc kubenswrapper[4730]: I0930 10:25:02.129784 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wc9nb" Sep 30 10:25:02 crc kubenswrapper[4730]: I0930 10:25:02.336840 4730 patch_prober.go:28] interesting pod/machine-config-daemon-d4zf9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 10:25:02 crc kubenswrapper[4730]: I0930 10:25:02.336956 4730 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 10:25:02 crc kubenswrapper[4730]: I0930 10:25:02.403364 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-2qpzs" event={"ID":"96923f77-1ffc-4d73-adf3-33f66499e0f9","Type":"ContainerStarted","Data":"0e6893565ada900c40fd32ee7f045d6564260db362e924074b98852ed9704aa3"} Sep 30 10:25:02 crc kubenswrapper[4730]: I0930 10:25:02.422846 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-2qpzs" podStartSLOduration=2.538470028 podStartE2EDuration="3.422827734s" podCreationTimestamp="2025-09-30 10:24:59 +0000 UTC" firstStartedPulling="2025-09-30 10:25:00.304731648 +0000 UTC m=+2144.637991641" lastFinishedPulling="2025-09-30 10:25:01.189089354 +0000 UTC m=+2145.522349347" observedRunningTime="2025-09-30 10:25:02.419042396 +0000 UTC m=+2146.752302389" watchObservedRunningTime="2025-09-30 10:25:02.422827734 +0000 UTC m=+2146.756087727" Sep 30 10:25:02 crc kubenswrapper[4730]: I0930 10:25:02.601003 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-wc9nb"] Sep 30 10:25:02 crc kubenswrapper[4730]: W0930 10:25:02.604803 4730 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcad3aa75_af34_4dc9_b6fd_9e675e4812f2.slice/crio-af58ff25edccf82386910c99a3aa33879a285881d2d8871cac6409fc187ca84d WatchSource:0}: Error finding container af58ff25edccf82386910c99a3aa33879a285881d2d8871cac6409fc187ca84d: Status 404 returned error can't find the container with id af58ff25edccf82386910c99a3aa33879a285881d2d8871cac6409fc187ca84d Sep 30 10:25:03 crc kubenswrapper[4730]: I0930 10:25:03.412565 4730 generic.go:334] "Generic (PLEG): container finished" podID="cad3aa75-af34-4dc9-b6fd-9e675e4812f2" containerID="be70ee52226ba543735c4c4ca28d48cc9a32bd170f4d4e10fc96545d27e4778b" exitCode=0 Sep 30 10:25:03 crc kubenswrapper[4730]: I0930 10:25:03.412697 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wc9nb" event={"ID":"cad3aa75-af34-4dc9-b6fd-9e675e4812f2","Type":"ContainerDied","Data":"be70ee52226ba543735c4c4ca28d48cc9a32bd170f4d4e10fc96545d27e4778b"} Sep 30 10:25:03 crc kubenswrapper[4730]: I0930 
Sep 30 10:25:04 crc kubenswrapper[4730]: I0930 10:25:04.804454 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-s5ftl"]
Sep 30 10:25:04 crc kubenswrapper[4730]: I0930 10:25:04.806754 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-s5ftl"
Sep 30 10:25:04 crc kubenswrapper[4730]: I0930 10:25:04.827341 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-s5ftl"]
Sep 30 10:25:04 crc kubenswrapper[4730]: I0930 10:25:04.911949 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a9ba3192-5498-4e8e-bc2c-e54f2627c51e-utilities\") pod \"certified-operators-s5ftl\" (UID: \"a9ba3192-5498-4e8e-bc2c-e54f2627c51e\") " pod="openshift-marketplace/certified-operators-s5ftl"
Sep 30 10:25:04 crc kubenswrapper[4730]: I0930 10:25:04.912266 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a9ba3192-5498-4e8e-bc2c-e54f2627c51e-catalog-content\") pod \"certified-operators-s5ftl\" (UID: \"a9ba3192-5498-4e8e-bc2c-e54f2627c51e\") " pod="openshift-marketplace/certified-operators-s5ftl"
Sep 30 10:25:04 crc kubenswrapper[4730]: I0930 10:25:04.913113 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wcmjr\" (UniqueName: \"kubernetes.io/projected/a9ba3192-5498-4e8e-bc2c-e54f2627c51e-kube-api-access-wcmjr\") pod \"certified-operators-s5ftl\" (UID: \"a9ba3192-5498-4e8e-bc2c-e54f2627c51e\") " pod="openshift-marketplace/certified-operators-s5ftl"
Sep 30 10:25:05 crc kubenswrapper[4730]: I0930 10:25:05.014744 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a9ba3192-5498-4e8e-bc2c-e54f2627c51e-utilities\") pod \"certified-operators-s5ftl\" (UID: \"a9ba3192-5498-4e8e-bc2c-e54f2627c51e\") " pod="openshift-marketplace/certified-operators-s5ftl"
Sep 30 10:25:05 crc kubenswrapper[4730]: I0930 10:25:05.015021 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a9ba3192-5498-4e8e-bc2c-e54f2627c51e-catalog-content\") pod \"certified-operators-s5ftl\" (UID: \"a9ba3192-5498-4e8e-bc2c-e54f2627c51e\") " pod="openshift-marketplace/certified-operators-s5ftl"
Sep 30 10:25:05 crc kubenswrapper[4730]: I0930 10:25:05.015184 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wcmjr\" (UniqueName: \"kubernetes.io/projected/a9ba3192-5498-4e8e-bc2c-e54f2627c51e-kube-api-access-wcmjr\") pod \"certified-operators-s5ftl\" (UID: \"a9ba3192-5498-4e8e-bc2c-e54f2627c51e\") " pod="openshift-marketplace/certified-operators-s5ftl"
Sep 30 10:25:05 crc kubenswrapper[4730]: I0930 10:25:05.015429 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a9ba3192-5498-4e8e-bc2c-e54f2627c51e-utilities\") pod \"certified-operators-s5ftl\" (UID: \"a9ba3192-5498-4e8e-bc2c-e54f2627c51e\") " pod="openshift-marketplace/certified-operators-s5ftl"
Sep 30 10:25:05 crc kubenswrapper[4730]: I0930 10:25:05.015501 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a9ba3192-5498-4e8e-bc2c-e54f2627c51e-catalog-content\") pod \"certified-operators-s5ftl\" (UID: \"a9ba3192-5498-4e8e-bc2c-e54f2627c51e\") " pod="openshift-marketplace/certified-operators-s5ftl"
Sep 30 10:25:05 crc kubenswrapper[4730]: I0930 10:25:05.035669 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wcmjr\" (UniqueName: \"kubernetes.io/projected/a9ba3192-5498-4e8e-bc2c-e54f2627c51e-kube-api-access-wcmjr\") pod \"certified-operators-s5ftl\" (UID: \"a9ba3192-5498-4e8e-bc2c-e54f2627c51e\") " pod="openshift-marketplace/certified-operators-s5ftl"
Sep 30 10:25:05 crc kubenswrapper[4730]: I0930 10:25:05.127072 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-s5ftl"
Sep 30 10:25:05 crc kubenswrapper[4730]: I0930 10:25:05.445881 4730 generic.go:334] "Generic (PLEG): container finished" podID="cad3aa75-af34-4dc9-b6fd-9e675e4812f2" containerID="eaa21e2b9c818d8e03452dfde6a97d1706f45dc225fffc67e9fb9a48dd9f2ce5" exitCode=0
Sep 30 10:25:05 crc kubenswrapper[4730]: I0930 10:25:05.445922 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wc9nb" event={"ID":"cad3aa75-af34-4dc9-b6fd-9e675e4812f2","Type":"ContainerDied","Data":"eaa21e2b9c818d8e03452dfde6a97d1706f45dc225fffc67e9fb9a48dd9f2ce5"}
Sep 30 10:25:05 crc kubenswrapper[4730]: I0930 10:25:05.770303 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-s5ftl"]
Sep 30 10:25:05 crc kubenswrapper[4730]: W0930 10:25:05.772002 4730 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda9ba3192_5498_4e8e_bc2c_e54f2627c51e.slice/crio-21c70e70423dc08ccf362b65cceb28d33a578481bfea5b252990795064a18def WatchSource:0}: Error finding container 21c70e70423dc08ccf362b65cceb28d33a578481bfea5b252990795064a18def: Status 404 returned error can't find the container with id 21c70e70423dc08ccf362b65cceb28d33a578481bfea5b252990795064a18def
Sep 30 10:25:06 crc kubenswrapper[4730]: I0930 10:25:06.456562 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wc9nb" event={"ID":"cad3aa75-af34-4dc9-b6fd-9e675e4812f2","Type":"ContainerStarted","Data":"8d4b9725bdf7296ae80443f2fad28d3503d96e701f30125f24eaf71d694479ab"}
Sep 30 10:25:06 crc kubenswrapper[4730]: I0930 10:25:06.458440 4730 generic.go:334] "Generic (PLEG): container finished" podID="a9ba3192-5498-4e8e-bc2c-e54f2627c51e" containerID="bba06a3492936820d4f5fd2564b0c6431ee8eb57437f23d2663431cc01dc6c47" exitCode=0
Sep 30 10:25:06 crc kubenswrapper[4730]: I0930 10:25:06.458493 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-s5ftl" event={"ID":"a9ba3192-5498-4e8e-bc2c-e54f2627c51e","Type":"ContainerDied","Data":"bba06a3492936820d4f5fd2564b0c6431ee8eb57437f23d2663431cc01dc6c47"}
Sep 30 10:25:06 crc kubenswrapper[4730]: I0930 10:25:06.458549 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-s5ftl" event={"ID":"a9ba3192-5498-4e8e-bc2c-e54f2627c51e","Type":"ContainerStarted","Data":"21c70e70423dc08ccf362b65cceb28d33a578481bfea5b252990795064a18def"}
Sep 30 10:25:06 crc kubenswrapper[4730]: I0930 10:25:06.475791 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-wc9nb" podStartSLOduration=2.8529558919999998 podStartE2EDuration="5.475772081s" podCreationTimestamp="2025-09-30 10:25:01 +0000 UTC" firstStartedPulling="2025-09-30 10:25:03.414934982 +0000 UTC m=+2147.748194975" lastFinishedPulling="2025-09-30 10:25:06.037751171 +0000 UTC m=+2150.371011164" observedRunningTime="2025-09-30 10:25:06.47227363 +0000 UTC m=+2150.805533633" watchObservedRunningTime="2025-09-30 10:25:06.475772081 +0000 UTC m=+2150.809032074"
Sep 30 10:25:08 crc kubenswrapper[4730]: I0930 10:25:08.481490 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-s5ftl" event={"ID":"a9ba3192-5498-4e8e-bc2c-e54f2627c51e","Type":"ContainerStarted","Data":"81f172d754ffac0cf8b17b4363f4d0c304235c3c26049b074305a2a136e61b07"}
Sep 30 10:25:09 crc kubenswrapper[4730]: I0930 10:25:09.494722 4730 generic.go:334] "Generic (PLEG): container finished" podID="a9ba3192-5498-4e8e-bc2c-e54f2627c51e" containerID="81f172d754ffac0cf8b17b4363f4d0c304235c3c26049b074305a2a136e61b07" exitCode=0
Sep 30 10:25:09 crc kubenswrapper[4730]: I0930 10:25:09.494913 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-s5ftl" event={"ID":"a9ba3192-5498-4e8e-bc2c-e54f2627c51e","Type":"ContainerDied","Data":"81f172d754ffac0cf8b17b4363f4d0c304235c3c26049b074305a2a136e61b07"}
Sep 30 10:25:11 crc kubenswrapper[4730]: I0930 10:25:11.513886 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-s5ftl" event={"ID":"a9ba3192-5498-4e8e-bc2c-e54f2627c51e","Type":"ContainerStarted","Data":"daa91b6448633ec920edb6ec4cfff249fcbb4968bb0969f2144697318b4144f0"}
Sep 30 10:25:11 crc kubenswrapper[4730]: I0930 10:25:11.532998 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-s5ftl" podStartSLOduration=3.533784352 podStartE2EDuration="7.532976463s" podCreationTimestamp="2025-09-30 10:25:04 +0000 UTC" firstStartedPulling="2025-09-30 10:25:06.45959268 +0000 UTC m=+2150.792852673" lastFinishedPulling="2025-09-30 10:25:10.458784781 +0000 UTC m=+2154.792044784" observedRunningTime="2025-09-30 10:25:11.52981686 +0000 UTC m=+2155.863076883" watchObservedRunningTime="2025-09-30 10:25:11.532976463 +0000 UTC m=+2155.866236466"
Sep 30 10:25:12 crc kubenswrapper[4730]: I0930 10:25:12.130626 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-wc9nb"
Sep 30 10:25:12 crc kubenswrapper[4730]: I0930 10:25:12.131411 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-wc9nb"
Sep 30 10:25:12 crc kubenswrapper[4730]: I0930 10:25:12.177683 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-wc9nb"
Sep 30 10:25:12 crc kubenswrapper[4730]: I0930 10:25:12.567754 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-wc9nb"
Sep 30 10:25:13 crc kubenswrapper[4730]: I0930 10:25:13.591138 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-wc9nb"]
pods=["openshift-marketplace/redhat-marketplace-wc9nb"] Sep 30 10:25:14 crc kubenswrapper[4730]: I0930 10:25:14.544419 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-wc9nb" podUID="cad3aa75-af34-4dc9-b6fd-9e675e4812f2" containerName="registry-server" containerID="cri-o://8d4b9725bdf7296ae80443f2fad28d3503d96e701f30125f24eaf71d694479ab" gracePeriod=2 Sep 30 10:25:15 crc kubenswrapper[4730]: I0930 10:25:15.065313 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wc9nb" Sep 30 10:25:15 crc kubenswrapper[4730]: I0930 10:25:15.127848 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-s5ftl" Sep 30 10:25:15 crc kubenswrapper[4730]: I0930 10:25:15.128147 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-s5ftl" Sep 30 10:25:15 crc kubenswrapper[4730]: I0930 10:25:15.179094 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-s5ftl" Sep 30 10:25:15 crc kubenswrapper[4730]: I0930 10:25:15.215077 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cad3aa75-af34-4dc9-b6fd-9e675e4812f2-utilities\") pod \"cad3aa75-af34-4dc9-b6fd-9e675e4812f2\" (UID: \"cad3aa75-af34-4dc9-b6fd-9e675e4812f2\") " Sep 30 10:25:15 crc kubenswrapper[4730]: I0930 10:25:15.215227 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cad3aa75-af34-4dc9-b6fd-9e675e4812f2-catalog-content\") pod \"cad3aa75-af34-4dc9-b6fd-9e675e4812f2\" (UID: \"cad3aa75-af34-4dc9-b6fd-9e675e4812f2\") " Sep 30 10:25:15 crc kubenswrapper[4730]: I0930 10:25:15.215291 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m7ldv\" (UniqueName: \"kubernetes.io/projected/cad3aa75-af34-4dc9-b6fd-9e675e4812f2-kube-api-access-m7ldv\") pod \"cad3aa75-af34-4dc9-b6fd-9e675e4812f2\" (UID: \"cad3aa75-af34-4dc9-b6fd-9e675e4812f2\") " Sep 30 10:25:15 crc kubenswrapper[4730]: I0930 10:25:15.217968 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cad3aa75-af34-4dc9-b6fd-9e675e4812f2-utilities" (OuterVolumeSpecName: "utilities") pod "cad3aa75-af34-4dc9-b6fd-9e675e4812f2" (UID: "cad3aa75-af34-4dc9-b6fd-9e675e4812f2"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 10:25:15 crc kubenswrapper[4730]: I0930 10:25:15.225852 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cad3aa75-af34-4dc9-b6fd-9e675e4812f2-kube-api-access-m7ldv" (OuterVolumeSpecName: "kube-api-access-m7ldv") pod "cad3aa75-af34-4dc9-b6fd-9e675e4812f2" (UID: "cad3aa75-af34-4dc9-b6fd-9e675e4812f2"). InnerVolumeSpecName "kube-api-access-m7ldv". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:25:15 crc kubenswrapper[4730]: I0930 10:25:15.237183 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cad3aa75-af34-4dc9-b6fd-9e675e4812f2-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "cad3aa75-af34-4dc9-b6fd-9e675e4812f2" (UID: "cad3aa75-af34-4dc9-b6fd-9e675e4812f2"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 10:25:15 crc kubenswrapper[4730]: I0930 10:25:15.317923 4730 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cad3aa75-af34-4dc9-b6fd-9e675e4812f2-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 10:25:15 crc kubenswrapper[4730]: I0930 10:25:15.318229 4730 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cad3aa75-af34-4dc9-b6fd-9e675e4812f2-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 10:25:15 crc kubenswrapper[4730]: I0930 10:25:15.318305 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m7ldv\" (UniqueName: \"kubernetes.io/projected/cad3aa75-af34-4dc9-b6fd-9e675e4812f2-kube-api-access-m7ldv\") on node \"crc\" DevicePath \"\"" Sep 30 10:25:15 crc kubenswrapper[4730]: I0930 10:25:15.556592 4730 generic.go:334] "Generic (PLEG): container finished" podID="cad3aa75-af34-4dc9-b6fd-9e675e4812f2" containerID="8d4b9725bdf7296ae80443f2fad28d3503d96e701f30125f24eaf71d694479ab" exitCode=0 Sep 30 10:25:15 crc kubenswrapper[4730]: I0930 10:25:15.556663 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wc9nb" Sep 30 10:25:15 crc kubenswrapper[4730]: I0930 10:25:15.556699 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wc9nb" event={"ID":"cad3aa75-af34-4dc9-b6fd-9e675e4812f2","Type":"ContainerDied","Data":"8d4b9725bdf7296ae80443f2fad28d3503d96e701f30125f24eaf71d694479ab"} Sep 30 10:25:15 crc kubenswrapper[4730]: I0930 10:25:15.556784 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wc9nb" event={"ID":"cad3aa75-af34-4dc9-b6fd-9e675e4812f2","Type":"ContainerDied","Data":"af58ff25edccf82386910c99a3aa33879a285881d2d8871cac6409fc187ca84d"} Sep 30 10:25:15 crc kubenswrapper[4730]: I0930 10:25:15.556937 4730 scope.go:117] "RemoveContainer" containerID="8d4b9725bdf7296ae80443f2fad28d3503d96e701f30125f24eaf71d694479ab" Sep 30 10:25:15 crc kubenswrapper[4730]: I0930 10:25:15.607970 4730 scope.go:117] "RemoveContainer" containerID="eaa21e2b9c818d8e03452dfde6a97d1706f45dc225fffc67e9fb9a48dd9f2ce5" Sep 30 10:25:15 crc kubenswrapper[4730]: I0930 10:25:15.609738 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-wc9nb"] Sep 30 10:25:15 crc kubenswrapper[4730]: I0930 10:25:15.629445 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-wc9nb"] Sep 30 10:25:15 crc kubenswrapper[4730]: I0930 10:25:15.640873 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-s5ftl" Sep 30 10:25:15 crc kubenswrapper[4730]: I0930 10:25:15.646475 4730 scope.go:117] "RemoveContainer" containerID="be70ee52226ba543735c4c4ca28d48cc9a32bd170f4d4e10fc96545d27e4778b" Sep 30 10:25:15 crc kubenswrapper[4730]: I0930 10:25:15.690914 4730 scope.go:117] "RemoveContainer" containerID="8d4b9725bdf7296ae80443f2fad28d3503d96e701f30125f24eaf71d694479ab" Sep 30 10:25:15 crc kubenswrapper[4730]: E0930 10:25:15.691533 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8d4b9725bdf7296ae80443f2fad28d3503d96e701f30125f24eaf71d694479ab\": container with ID starting with 
Sep 30 10:25:15 crc kubenswrapper[4730]: I0930 10:25:15.691594 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8d4b9725bdf7296ae80443f2fad28d3503d96e701f30125f24eaf71d694479ab"} err="failed to get container status \"8d4b9725bdf7296ae80443f2fad28d3503d96e701f30125f24eaf71d694479ab\": rpc error: code = NotFound desc = could not find container \"8d4b9725bdf7296ae80443f2fad28d3503d96e701f30125f24eaf71d694479ab\": container with ID starting with 8d4b9725bdf7296ae80443f2fad28d3503d96e701f30125f24eaf71d694479ab not found: ID does not exist"
Sep 30 10:25:15 crc kubenswrapper[4730]: I0930 10:25:15.691793 4730 scope.go:117] "RemoveContainer" containerID="eaa21e2b9c818d8e03452dfde6a97d1706f45dc225fffc67e9fb9a48dd9f2ce5"
Sep 30 10:25:15 crc kubenswrapper[4730]: E0930 10:25:15.692159 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eaa21e2b9c818d8e03452dfde6a97d1706f45dc225fffc67e9fb9a48dd9f2ce5\": container with ID starting with eaa21e2b9c818d8e03452dfde6a97d1706f45dc225fffc67e9fb9a48dd9f2ce5 not found: ID does not exist" containerID="eaa21e2b9c818d8e03452dfde6a97d1706f45dc225fffc67e9fb9a48dd9f2ce5"
Sep 30 10:25:15 crc kubenswrapper[4730]: I0930 10:25:15.692189 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eaa21e2b9c818d8e03452dfde6a97d1706f45dc225fffc67e9fb9a48dd9f2ce5"} err="failed to get container status \"eaa21e2b9c818d8e03452dfde6a97d1706f45dc225fffc67e9fb9a48dd9f2ce5\": rpc error: code = NotFound desc = could not find container \"eaa21e2b9c818d8e03452dfde6a97d1706f45dc225fffc67e9fb9a48dd9f2ce5\": container with ID starting with eaa21e2b9c818d8e03452dfde6a97d1706f45dc225fffc67e9fb9a48dd9f2ce5 not found: ID does not exist"
Sep 30 10:25:15 crc kubenswrapper[4730]: I0930 10:25:15.692207 4730 scope.go:117] "RemoveContainer" containerID="be70ee52226ba543735c4c4ca28d48cc9a32bd170f4d4e10fc96545d27e4778b"
Sep 30 10:25:15 crc kubenswrapper[4730]: E0930 10:25:15.692673 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"be70ee52226ba543735c4c4ca28d48cc9a32bd170f4d4e10fc96545d27e4778b\": container with ID starting with be70ee52226ba543735c4c4ca28d48cc9a32bd170f4d4e10fc96545d27e4778b not found: ID does not exist" containerID="be70ee52226ba543735c4c4ca28d48cc9a32bd170f4d4e10fc96545d27e4778b"
Sep 30 10:25:15 crc kubenswrapper[4730]: I0930 10:25:15.692719 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"be70ee52226ba543735c4c4ca28d48cc9a32bd170f4d4e10fc96545d27e4778b"} err="failed to get container status \"be70ee52226ba543735c4c4ca28d48cc9a32bd170f4d4e10fc96545d27e4778b\": rpc error: code = NotFound desc = could not find container \"be70ee52226ba543735c4c4ca28d48cc9a32bd170f4d4e10fc96545d27e4778b\": container with ID starting with be70ee52226ba543735c4c4ca28d48cc9a32bd170f4d4e10fc96545d27e4778b not found: ID does not exist"
Sep 30 10:25:16 crc kubenswrapper[4730]: I0930 10:25:16.394842 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cad3aa75-af34-4dc9-b6fd-9e675e4812f2" path="/var/lib/kubelet/pods/cad3aa75-af34-4dc9-b6fd-9e675e4812f2/volumes"
Sep 30 10:25:17 crc kubenswrapper[4730]: I0930 10:25:17.592369 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-s5ftl"]
Sep 30 10:25:17 crc kubenswrapper[4730]: I0930 10:25:17.592637 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-s5ftl" podUID="a9ba3192-5498-4e8e-bc2c-e54f2627c51e" containerName="registry-server" containerID="cri-o://daa91b6448633ec920edb6ec4cfff249fcbb4968bb0969f2144697318b4144f0" gracePeriod=2
Sep 30 10:25:17 crc kubenswrapper[4730]: E0930 10:25:17.738478 4730 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda9ba3192_5498_4e8e_bc2c_e54f2627c51e.slice/crio-conmon-daa91b6448633ec920edb6ec4cfff249fcbb4968bb0969f2144697318b4144f0.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda9ba3192_5498_4e8e_bc2c_e54f2627c51e.slice/crio-daa91b6448633ec920edb6ec4cfff249fcbb4968bb0969f2144697318b4144f0.scope\": RecentStats: unable to find data in memory cache]"
Sep 30 10:25:18 crc kubenswrapper[4730]: I0930 10:25:18.042712 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-s5ftl"
Sep 30 10:25:18 crc kubenswrapper[4730]: I0930 10:25:18.081014 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a9ba3192-5498-4e8e-bc2c-e54f2627c51e-utilities\") pod \"a9ba3192-5498-4e8e-bc2c-e54f2627c51e\" (UID: \"a9ba3192-5498-4e8e-bc2c-e54f2627c51e\") "
Sep 30 10:25:18 crc kubenswrapper[4730]: I0930 10:25:18.081145 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wcmjr\" (UniqueName: \"kubernetes.io/projected/a9ba3192-5498-4e8e-bc2c-e54f2627c51e-kube-api-access-wcmjr\") pod \"a9ba3192-5498-4e8e-bc2c-e54f2627c51e\" (UID: \"a9ba3192-5498-4e8e-bc2c-e54f2627c51e\") "
Sep 30 10:25:18 crc kubenswrapper[4730]: I0930 10:25:18.081321 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a9ba3192-5498-4e8e-bc2c-e54f2627c51e-catalog-content\") pod \"a9ba3192-5498-4e8e-bc2c-e54f2627c51e\" (UID: \"a9ba3192-5498-4e8e-bc2c-e54f2627c51e\") "
Sep 30 10:25:18 crc kubenswrapper[4730]: I0930 10:25:18.081859 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a9ba3192-5498-4e8e-bc2c-e54f2627c51e-utilities" (OuterVolumeSpecName: "utilities") pod "a9ba3192-5498-4e8e-bc2c-e54f2627c51e" (UID: "a9ba3192-5498-4e8e-bc2c-e54f2627c51e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 10:25:18 crc kubenswrapper[4730]: I0930 10:25:18.087856 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a9ba3192-5498-4e8e-bc2c-e54f2627c51e-kube-api-access-wcmjr" (OuterVolumeSpecName: "kube-api-access-wcmjr") pod "a9ba3192-5498-4e8e-bc2c-e54f2627c51e" (UID: "a9ba3192-5498-4e8e-bc2c-e54f2627c51e"). InnerVolumeSpecName "kube-api-access-wcmjr". PluginName "kubernetes.io/projected", VolumeGidValue ""
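[editor's note] The repeated "ContainerStatus from runtime service failed ... NotFound" errors above are benign: RemoveContainer races with the runtime's own cleanup, so by the time kubelet asks for status the container is already gone. Kubelet logs the error and moves on; the net effect is idempotent deletion. A sketch of that pattern with an invented runtime interface, not the CRI client:

// Idempotent cleanup: "not found" on delete means the work is already done.
package main

import (
	"errors"
	"fmt"
)

var errNotFound = errors.New("container not found")

type fakeRuntime struct{ containers map[string]bool }

func (r *fakeRuntime) remove(id string) error {
	if !r.containers[id] {
		return fmt.Errorf("could not find container %q: %w", id, errNotFound)
	}
	delete(r.containers, id)
	return nil
}

func cleanup(r *fakeRuntime, id string) {
	if err := r.remove(id); err != nil {
		// Log it, as kubelet does, but do not treat NotFound as fatal.
		fmt.Println("DeleteContainer returned error:", err)
		if !errors.Is(err, errNotFound) {
			return
		}
	}
	fmt.Println("container gone:", id)
}

func main() {
	r := &fakeRuntime{containers: map[string]bool{"8d4b9725": true}}
	cleanup(r, "8d4b9725") // removes it
	cleanup(r, "8d4b9725") // second pass hits NotFound, still converges
}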
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:25:18 crc kubenswrapper[4730]: I0930 10:25:18.131153 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a9ba3192-5498-4e8e-bc2c-e54f2627c51e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a9ba3192-5498-4e8e-bc2c-e54f2627c51e" (UID: "a9ba3192-5498-4e8e-bc2c-e54f2627c51e"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 10:25:18 crc kubenswrapper[4730]: I0930 10:25:18.183634 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wcmjr\" (UniqueName: \"kubernetes.io/projected/a9ba3192-5498-4e8e-bc2c-e54f2627c51e-kube-api-access-wcmjr\") on node \"crc\" DevicePath \"\"" Sep 30 10:25:18 crc kubenswrapper[4730]: I0930 10:25:18.183668 4730 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a9ba3192-5498-4e8e-bc2c-e54f2627c51e-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 10:25:18 crc kubenswrapper[4730]: I0930 10:25:18.183681 4730 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a9ba3192-5498-4e8e-bc2c-e54f2627c51e-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 10:25:18 crc kubenswrapper[4730]: I0930 10:25:18.582799 4730 generic.go:334] "Generic (PLEG): container finished" podID="a9ba3192-5498-4e8e-bc2c-e54f2627c51e" containerID="daa91b6448633ec920edb6ec4cfff249fcbb4968bb0969f2144697318b4144f0" exitCode=0 Sep 30 10:25:18 crc kubenswrapper[4730]: I0930 10:25:18.582846 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-s5ftl" event={"ID":"a9ba3192-5498-4e8e-bc2c-e54f2627c51e","Type":"ContainerDied","Data":"daa91b6448633ec920edb6ec4cfff249fcbb4968bb0969f2144697318b4144f0"} Sep 30 10:25:18 crc kubenswrapper[4730]: I0930 10:25:18.582877 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-s5ftl" event={"ID":"a9ba3192-5498-4e8e-bc2c-e54f2627c51e","Type":"ContainerDied","Data":"21c70e70423dc08ccf362b65cceb28d33a578481bfea5b252990795064a18def"} Sep 30 10:25:18 crc kubenswrapper[4730]: I0930 10:25:18.582897 4730 scope.go:117] "RemoveContainer" containerID="daa91b6448633ec920edb6ec4cfff249fcbb4968bb0969f2144697318b4144f0" Sep 30 10:25:18 crc kubenswrapper[4730]: I0930 10:25:18.583037 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-s5ftl" Sep 30 10:25:18 crc kubenswrapper[4730]: I0930 10:25:18.618197 4730 scope.go:117] "RemoveContainer" containerID="81f172d754ffac0cf8b17b4363f4d0c304235c3c26049b074305a2a136e61b07" Sep 30 10:25:18 crc kubenswrapper[4730]: I0930 10:25:18.621384 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-s5ftl"] Sep 30 10:25:18 crc kubenswrapper[4730]: I0930 10:25:18.629390 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-s5ftl"] Sep 30 10:25:18 crc kubenswrapper[4730]: I0930 10:25:18.637695 4730 scope.go:117] "RemoveContainer" containerID="bba06a3492936820d4f5fd2564b0c6431ee8eb57437f23d2663431cc01dc6c47" Sep 30 10:25:18 crc kubenswrapper[4730]: I0930 10:25:18.680413 4730 scope.go:117] "RemoveContainer" containerID="daa91b6448633ec920edb6ec4cfff249fcbb4968bb0969f2144697318b4144f0" Sep 30 10:25:18 crc kubenswrapper[4730]: E0930 10:25:18.680984 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"daa91b6448633ec920edb6ec4cfff249fcbb4968bb0969f2144697318b4144f0\": container with ID starting with daa91b6448633ec920edb6ec4cfff249fcbb4968bb0969f2144697318b4144f0 not found: ID does not exist" containerID="daa91b6448633ec920edb6ec4cfff249fcbb4968bb0969f2144697318b4144f0" Sep 30 10:25:18 crc kubenswrapper[4730]: I0930 10:25:18.681014 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"daa91b6448633ec920edb6ec4cfff249fcbb4968bb0969f2144697318b4144f0"} err="failed to get container status \"daa91b6448633ec920edb6ec4cfff249fcbb4968bb0969f2144697318b4144f0\": rpc error: code = NotFound desc = could not find container \"daa91b6448633ec920edb6ec4cfff249fcbb4968bb0969f2144697318b4144f0\": container with ID starting with daa91b6448633ec920edb6ec4cfff249fcbb4968bb0969f2144697318b4144f0 not found: ID does not exist" Sep 30 10:25:18 crc kubenswrapper[4730]: I0930 10:25:18.681032 4730 scope.go:117] "RemoveContainer" containerID="81f172d754ffac0cf8b17b4363f4d0c304235c3c26049b074305a2a136e61b07" Sep 30 10:25:18 crc kubenswrapper[4730]: E0930 10:25:18.681233 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"81f172d754ffac0cf8b17b4363f4d0c304235c3c26049b074305a2a136e61b07\": container with ID starting with 81f172d754ffac0cf8b17b4363f4d0c304235c3c26049b074305a2a136e61b07 not found: ID does not exist" containerID="81f172d754ffac0cf8b17b4363f4d0c304235c3c26049b074305a2a136e61b07" Sep 30 10:25:18 crc kubenswrapper[4730]: I0930 10:25:18.681253 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"81f172d754ffac0cf8b17b4363f4d0c304235c3c26049b074305a2a136e61b07"} err="failed to get container status \"81f172d754ffac0cf8b17b4363f4d0c304235c3c26049b074305a2a136e61b07\": rpc error: code = NotFound desc = could not find container \"81f172d754ffac0cf8b17b4363f4d0c304235c3c26049b074305a2a136e61b07\": container with ID starting with 81f172d754ffac0cf8b17b4363f4d0c304235c3c26049b074305a2a136e61b07 not found: ID does not exist" Sep 30 10:25:18 crc kubenswrapper[4730]: I0930 10:25:18.681265 4730 scope.go:117] "RemoveContainer" containerID="bba06a3492936820d4f5fd2564b0c6431ee8eb57437f23d2663431cc01dc6c47" Sep 30 10:25:18 crc kubenswrapper[4730]: E0930 10:25:18.681666 4730 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"bba06a3492936820d4f5fd2564b0c6431ee8eb57437f23d2663431cc01dc6c47\": container with ID starting with bba06a3492936820d4f5fd2564b0c6431ee8eb57437f23d2663431cc01dc6c47 not found: ID does not exist" containerID="bba06a3492936820d4f5fd2564b0c6431ee8eb57437f23d2663431cc01dc6c47" Sep 30 10:25:18 crc kubenswrapper[4730]: I0930 10:25:18.681698 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bba06a3492936820d4f5fd2564b0c6431ee8eb57437f23d2663431cc01dc6c47"} err="failed to get container status \"bba06a3492936820d4f5fd2564b0c6431ee8eb57437f23d2663431cc01dc6c47\": rpc error: code = NotFound desc = could not find container \"bba06a3492936820d4f5fd2564b0c6431ee8eb57437f23d2663431cc01dc6c47\": container with ID starting with bba06a3492936820d4f5fd2564b0c6431ee8eb57437f23d2663431cc01dc6c47 not found: ID does not exist" Sep 30 10:25:20 crc kubenswrapper[4730]: I0930 10:25:20.390717 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a9ba3192-5498-4e8e-bc2c-e54f2627c51e" path="/var/lib/kubelet/pods/a9ba3192-5498-4e8e-bc2c-e54f2627c51e/volumes" Sep 30 10:25:23 crc kubenswrapper[4730]: I0930 10:25:23.518495 4730 scope.go:117] "RemoveContainer" containerID="4d9d26c90b0435e3b1eb2acbf25b22f2970a3c36aa3001181751f254715e90c7" Sep 30 10:25:23 crc kubenswrapper[4730]: I0930 10:25:23.558448 4730 scope.go:117] "RemoveContainer" containerID="31d1cc5268f922bb3dc96527d76a66c3fee7c866b6b2b49be2eeefe1682348b2" Sep 30 10:25:23 crc kubenswrapper[4730]: I0930 10:25:23.712672 4730 scope.go:117] "RemoveContainer" containerID="6d4a0ffdefdd28b27e93c3e105638672bb45fab240df9d1c04536e5d96da363b" Sep 30 10:25:23 crc kubenswrapper[4730]: I0930 10:25:23.922358 4730 scope.go:117] "RemoveContainer" containerID="ed706224189e623dcca846ac44bd976a99861004af3c3ec6cc610492e737eb33" Sep 30 10:25:23 crc kubenswrapper[4730]: I0930 10:25:23.984743 4730 scope.go:117] "RemoveContainer" containerID="f83434abfe73661a9c41e0686aa93032d0e514b8a5753e3b44b7129b315fc57e" Sep 30 10:25:24 crc kubenswrapper[4730]: I0930 10:25:24.015147 4730 scope.go:117] "RemoveContainer" containerID="b469053f9677578eaa1c30c35d47312338c5905f70d09701dbcfc318e073cbb9" Sep 30 10:25:24 crc kubenswrapper[4730]: I0930 10:25:24.049679 4730 scope.go:117] "RemoveContainer" containerID="c4cb6a2ecd20b17c3d3be299b076b15ae067be9a54441b204d478c285ee5e6c8" Sep 30 10:25:24 crc kubenswrapper[4730]: I0930 10:25:24.075928 4730 scope.go:117] "RemoveContainer" containerID="24fa7aae52a8f9e3dcade1ba505d869de59aedb4b1dd755a3f0cb7e9b28ea0ed" Sep 30 10:25:24 crc kubenswrapper[4730]: I0930 10:25:24.121732 4730 scope.go:117] "RemoveContainer" containerID="f6a311497fee41998076d4bcaae584e4852882de010269bf4f4e4322b0ab9cd7" Sep 30 10:25:24 crc kubenswrapper[4730]: I0930 10:25:24.151502 4730 scope.go:117] "RemoveContainer" containerID="e07ea6d734f17ae651a76201ba3c6c4296a3176aa665456a828480cc6a95e800" Sep 30 10:25:32 crc kubenswrapper[4730]: I0930 10:25:32.337066 4730 patch_prober.go:28] interesting pod/machine-config-daemon-d4zf9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 10:25:32 crc kubenswrapper[4730]: I0930 10:25:32.337371 4730 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" 
podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 10:26:02 crc kubenswrapper[4730]: I0930 10:26:02.336862 4730 patch_prober.go:28] interesting pod/machine-config-daemon-d4zf9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 10:26:02 crc kubenswrapper[4730]: I0930 10:26:02.337417 4730 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 10:26:02 crc kubenswrapper[4730]: I0930 10:26:02.337467 4730 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" Sep 30 10:26:02 crc kubenswrapper[4730]: I0930 10:26:02.338239 4730 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"9f6faea1f020203ed696b478f58ffd89607071ce35adf31ee047b8ef2439d347"} pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 10:26:02 crc kubenswrapper[4730]: I0930 10:26:02.338294 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerName="machine-config-daemon" containerID="cri-o://9f6faea1f020203ed696b478f58ffd89607071ce35adf31ee047b8ef2439d347" gracePeriod=600 Sep 30 10:26:02 crc kubenswrapper[4730]: E0930 10:26:02.517788 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 10:26:02 crc kubenswrapper[4730]: I0930 10:26:02.955928 4730 generic.go:334] "Generic (PLEG): container finished" podID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerID="9f6faea1f020203ed696b478f58ffd89607071ce35adf31ee047b8ef2439d347" exitCode=0 Sep 30 10:26:02 crc kubenswrapper[4730]: I0930 10:26:02.955982 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" event={"ID":"95bd4436-8399-478d-9552-c9ba5ae8f327","Type":"ContainerDied","Data":"9f6faea1f020203ed696b478f58ffd89607071ce35adf31ee047b8ef2439d347"} Sep 30 10:26:02 crc kubenswrapper[4730]: I0930 10:26:02.956021 4730 scope.go:117] "RemoveContainer" containerID="eaa28c0322c72a3838f60d339bc131399ae00513f606d27c5a12509ba2142b15" Sep 30 10:26:02 crc kubenswrapper[4730]: I0930 10:26:02.956665 4730 scope.go:117] "RemoveContainer" containerID="9f6faea1f020203ed696b478f58ffd89607071ce35adf31ee047b8ef2439d347" Sep 30 10:26:02 crc kubenswrapper[4730]: E0930 10:26:02.956949 4730 pod_workers.go:1301] "Error syncing pod, skipping" 
err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 10:26:17 crc kubenswrapper[4730]: I0930 10:26:17.381197 4730 scope.go:117] "RemoveContainer" containerID="9f6faea1f020203ed696b478f58ffd89607071ce35adf31ee047b8ef2439d347" Sep 30 10:26:17 crc kubenswrapper[4730]: E0930 10:26:17.382063 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 10:26:31 crc kubenswrapper[4730]: I0930 10:26:31.381264 4730 scope.go:117] "RemoveContainer" containerID="9f6faea1f020203ed696b478f58ffd89607071ce35adf31ee047b8ef2439d347" Sep 30 10:26:31 crc kubenswrapper[4730]: E0930 10:26:31.381995 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 10:26:39 crc kubenswrapper[4730]: I0930 10:26:39.255356 4730 generic.go:334] "Generic (PLEG): container finished" podID="96923f77-1ffc-4d73-adf3-33f66499e0f9" containerID="0e6893565ada900c40fd32ee7f045d6564260db362e924074b98852ed9704aa3" exitCode=0 Sep 30 10:26:39 crc kubenswrapper[4730]: I0930 10:26:39.255436 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-2qpzs" event={"ID":"96923f77-1ffc-4d73-adf3-33f66499e0f9","Type":"ContainerDied","Data":"0e6893565ada900c40fd32ee7f045d6564260db362e924074b98852ed9704aa3"} Sep 30 10:26:40 crc kubenswrapper[4730]: I0930 10:26:40.678715 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-2qpzs" Sep 30 10:26:40 crc kubenswrapper[4730]: I0930 10:26:40.789482 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/96923f77-1ffc-4d73-adf3-33f66499e0f9-bootstrap-combined-ca-bundle\") pod \"96923f77-1ffc-4d73-adf3-33f66499e0f9\" (UID: \"96923f77-1ffc-4d73-adf3-33f66499e0f9\") " Sep 30 10:26:40 crc kubenswrapper[4730]: I0930 10:26:40.789640 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/96923f77-1ffc-4d73-adf3-33f66499e0f9-inventory\") pod \"96923f77-1ffc-4d73-adf3-33f66499e0f9\" (UID: \"96923f77-1ffc-4d73-adf3-33f66499e0f9\") " Sep 30 10:26:40 crc kubenswrapper[4730]: I0930 10:26:40.789836 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/96923f77-1ffc-4d73-adf3-33f66499e0f9-ceph\") pod \"96923f77-1ffc-4d73-adf3-33f66499e0f9\" (UID: \"96923f77-1ffc-4d73-adf3-33f66499e0f9\") " Sep 30 10:26:40 crc kubenswrapper[4730]: I0930 10:26:40.789890 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/96923f77-1ffc-4d73-adf3-33f66499e0f9-ssh-key\") pod \"96923f77-1ffc-4d73-adf3-33f66499e0f9\" (UID: \"96923f77-1ffc-4d73-adf3-33f66499e0f9\") " Sep 30 10:26:40 crc kubenswrapper[4730]: I0930 10:26:40.790003 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-65bxm\" (UniqueName: \"kubernetes.io/projected/96923f77-1ffc-4d73-adf3-33f66499e0f9-kube-api-access-65bxm\") pod \"96923f77-1ffc-4d73-adf3-33f66499e0f9\" (UID: \"96923f77-1ffc-4d73-adf3-33f66499e0f9\") " Sep 30 10:26:40 crc kubenswrapper[4730]: I0930 10:26:40.796291 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96923f77-1ffc-4d73-adf3-33f66499e0f9-kube-api-access-65bxm" (OuterVolumeSpecName: "kube-api-access-65bxm") pod "96923f77-1ffc-4d73-adf3-33f66499e0f9" (UID: "96923f77-1ffc-4d73-adf3-33f66499e0f9"). InnerVolumeSpecName "kube-api-access-65bxm". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:26:40 crc kubenswrapper[4730]: I0930 10:26:40.796413 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96923f77-1ffc-4d73-adf3-33f66499e0f9-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "96923f77-1ffc-4d73-adf3-33f66499e0f9" (UID: "96923f77-1ffc-4d73-adf3-33f66499e0f9"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:26:40 crc kubenswrapper[4730]: I0930 10:26:40.807893 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96923f77-1ffc-4d73-adf3-33f66499e0f9-ceph" (OuterVolumeSpecName: "ceph") pod "96923f77-1ffc-4d73-adf3-33f66499e0f9" (UID: "96923f77-1ffc-4d73-adf3-33f66499e0f9"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:26:40 crc kubenswrapper[4730]: I0930 10:26:40.818821 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96923f77-1ffc-4d73-adf3-33f66499e0f9-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "96923f77-1ffc-4d73-adf3-33f66499e0f9" (UID: "96923f77-1ffc-4d73-adf3-33f66499e0f9"). 
InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:26:40 crc kubenswrapper[4730]: I0930 10:26:40.819193 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96923f77-1ffc-4d73-adf3-33f66499e0f9-inventory" (OuterVolumeSpecName: "inventory") pod "96923f77-1ffc-4d73-adf3-33f66499e0f9" (UID: "96923f77-1ffc-4d73-adf3-33f66499e0f9"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:26:40 crc kubenswrapper[4730]: I0930 10:26:40.892240 4730 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/96923f77-1ffc-4d73-adf3-33f66499e0f9-ceph\") on node \"crc\" DevicePath \"\"" Sep 30 10:26:40 crc kubenswrapper[4730]: I0930 10:26:40.892280 4730 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/96923f77-1ffc-4d73-adf3-33f66499e0f9-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 10:26:40 crc kubenswrapper[4730]: I0930 10:26:40.892295 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-65bxm\" (UniqueName: \"kubernetes.io/projected/96923f77-1ffc-4d73-adf3-33f66499e0f9-kube-api-access-65bxm\") on node \"crc\" DevicePath \"\"" Sep 30 10:26:40 crc kubenswrapper[4730]: I0930 10:26:40.892308 4730 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/96923f77-1ffc-4d73-adf3-33f66499e0f9-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 10:26:40 crc kubenswrapper[4730]: I0930 10:26:40.892323 4730 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/96923f77-1ffc-4d73-adf3-33f66499e0f9-inventory\") on node \"crc\" DevicePath \"\"" Sep 30 10:26:41 crc kubenswrapper[4730]: I0930 10:26:41.273067 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-2qpzs" event={"ID":"96923f77-1ffc-4d73-adf3-33f66499e0f9","Type":"ContainerDied","Data":"c18c9ecc87820f6d73a01d5c31ee7e65b9bbb33f9611c2799040ad8fe18cda98"} Sep 30 10:26:41 crc kubenswrapper[4730]: I0930 10:26:41.273109 4730 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c18c9ecc87820f6d73a01d5c31ee7e65b9bbb33f9611c2799040ad8fe18cda98" Sep 30 10:26:41 crc kubenswrapper[4730]: I0930 10:26:41.273127 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-2qpzs" Sep 30 10:26:41 crc kubenswrapper[4730]: I0930 10:26:41.361791 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-jrhtl"] Sep 30 10:26:41 crc kubenswrapper[4730]: E0930 10:26:41.362417 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="96923f77-1ffc-4d73-adf3-33f66499e0f9" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Sep 30 10:26:41 crc kubenswrapper[4730]: I0930 10:26:41.362433 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="96923f77-1ffc-4d73-adf3-33f66499e0f9" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Sep 30 10:26:41 crc kubenswrapper[4730]: E0930 10:26:41.362453 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a9ba3192-5498-4e8e-bc2c-e54f2627c51e" containerName="extract-content" Sep 30 10:26:41 crc kubenswrapper[4730]: I0930 10:26:41.362459 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="a9ba3192-5498-4e8e-bc2c-e54f2627c51e" containerName="extract-content" Sep 30 10:26:41 crc kubenswrapper[4730]: E0930 10:26:41.362474 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a9ba3192-5498-4e8e-bc2c-e54f2627c51e" containerName="extract-utilities" Sep 30 10:26:41 crc kubenswrapper[4730]: I0930 10:26:41.362480 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="a9ba3192-5498-4e8e-bc2c-e54f2627c51e" containerName="extract-utilities" Sep 30 10:26:41 crc kubenswrapper[4730]: E0930 10:26:41.362493 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cad3aa75-af34-4dc9-b6fd-9e675e4812f2" containerName="extract-utilities" Sep 30 10:26:41 crc kubenswrapper[4730]: I0930 10:26:41.362498 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="cad3aa75-af34-4dc9-b6fd-9e675e4812f2" containerName="extract-utilities" Sep 30 10:26:41 crc kubenswrapper[4730]: E0930 10:26:41.362511 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a9ba3192-5498-4e8e-bc2c-e54f2627c51e" containerName="registry-server" Sep 30 10:26:41 crc kubenswrapper[4730]: I0930 10:26:41.362518 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="a9ba3192-5498-4e8e-bc2c-e54f2627c51e" containerName="registry-server" Sep 30 10:26:41 crc kubenswrapper[4730]: E0930 10:26:41.362530 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cad3aa75-af34-4dc9-b6fd-9e675e4812f2" containerName="registry-server" Sep 30 10:26:41 crc kubenswrapper[4730]: I0930 10:26:41.362535 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="cad3aa75-af34-4dc9-b6fd-9e675e4812f2" containerName="registry-server" Sep 30 10:26:41 crc kubenswrapper[4730]: E0930 10:26:41.362548 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cad3aa75-af34-4dc9-b6fd-9e675e4812f2" containerName="extract-content" Sep 30 10:26:41 crc kubenswrapper[4730]: I0930 10:26:41.362554 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="cad3aa75-af34-4dc9-b6fd-9e675e4812f2" containerName="extract-content" Sep 30 10:26:41 crc kubenswrapper[4730]: I0930 10:26:41.362754 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="cad3aa75-af34-4dc9-b6fd-9e675e4812f2" containerName="registry-server" Sep 30 10:26:41 crc kubenswrapper[4730]: I0930 10:26:41.362774 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="96923f77-1ffc-4d73-adf3-33f66499e0f9" 
containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Sep 30 10:26:41 crc kubenswrapper[4730]: I0930 10:26:41.362786 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="a9ba3192-5498-4e8e-bc2c-e54f2627c51e" containerName="registry-server" Sep 30 10:26:41 crc kubenswrapper[4730]: I0930 10:26:41.363445 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-jrhtl" Sep 30 10:26:41 crc kubenswrapper[4730]: I0930 10:26:41.368220 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Sep 30 10:26:41 crc kubenswrapper[4730]: I0930 10:26:41.368247 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-8vdlt" Sep 30 10:26:41 crc kubenswrapper[4730]: I0930 10:26:41.368215 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 10:26:41 crc kubenswrapper[4730]: I0930 10:26:41.368339 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 30 10:26:41 crc kubenswrapper[4730]: I0930 10:26:41.369432 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 30 10:26:41 crc kubenswrapper[4730]: I0930 10:26:41.371844 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-jrhtl"] Sep 30 10:26:41 crc kubenswrapper[4730]: I0930 10:26:41.402686 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7t8bk\" (UniqueName: \"kubernetes.io/projected/076a53c1-4f43-4c11-b67a-163d1fe06287-kube-api-access-7t8bk\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-jrhtl\" (UID: \"076a53c1-4f43-4c11-b67a-163d1fe06287\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-jrhtl" Sep 30 10:26:41 crc kubenswrapper[4730]: I0930 10:26:41.402770 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/076a53c1-4f43-4c11-b67a-163d1fe06287-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-jrhtl\" (UID: \"076a53c1-4f43-4c11-b67a-163d1fe06287\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-jrhtl" Sep 30 10:26:41 crc kubenswrapper[4730]: I0930 10:26:41.402839 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/076a53c1-4f43-4c11-b67a-163d1fe06287-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-jrhtl\" (UID: \"076a53c1-4f43-4c11-b67a-163d1fe06287\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-jrhtl" Sep 30 10:26:41 crc kubenswrapper[4730]: I0930 10:26:41.402888 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/076a53c1-4f43-4c11-b67a-163d1fe06287-ceph\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-jrhtl\" (UID: \"076a53c1-4f43-4c11-b67a-163d1fe06287\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-jrhtl" Sep 30 10:26:41 crc kubenswrapper[4730]: I0930 10:26:41.504351 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" 
(UniqueName: \"kubernetes.io/secret/076a53c1-4f43-4c11-b67a-163d1fe06287-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-jrhtl\" (UID: \"076a53c1-4f43-4c11-b67a-163d1fe06287\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-jrhtl" Sep 30 10:26:41 crc kubenswrapper[4730]: I0930 10:26:41.504558 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/076a53c1-4f43-4c11-b67a-163d1fe06287-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-jrhtl\" (UID: \"076a53c1-4f43-4c11-b67a-163d1fe06287\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-jrhtl" Sep 30 10:26:41 crc kubenswrapper[4730]: I0930 10:26:41.504609 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/076a53c1-4f43-4c11-b67a-163d1fe06287-ceph\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-jrhtl\" (UID: \"076a53c1-4f43-4c11-b67a-163d1fe06287\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-jrhtl" Sep 30 10:26:41 crc kubenswrapper[4730]: I0930 10:26:41.504766 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7t8bk\" (UniqueName: \"kubernetes.io/projected/076a53c1-4f43-4c11-b67a-163d1fe06287-kube-api-access-7t8bk\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-jrhtl\" (UID: \"076a53c1-4f43-4c11-b67a-163d1fe06287\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-jrhtl" Sep 30 10:26:41 crc kubenswrapper[4730]: I0930 10:26:41.509129 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/076a53c1-4f43-4c11-b67a-163d1fe06287-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-jrhtl\" (UID: \"076a53c1-4f43-4c11-b67a-163d1fe06287\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-jrhtl" Sep 30 10:26:41 crc kubenswrapper[4730]: I0930 10:26:41.509421 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/076a53c1-4f43-4c11-b67a-163d1fe06287-ceph\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-jrhtl\" (UID: \"076a53c1-4f43-4c11-b67a-163d1fe06287\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-jrhtl" Sep 30 10:26:41 crc kubenswrapper[4730]: I0930 10:26:41.510425 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/076a53c1-4f43-4c11-b67a-163d1fe06287-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-jrhtl\" (UID: \"076a53c1-4f43-4c11-b67a-163d1fe06287\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-jrhtl" Sep 30 10:26:41 crc kubenswrapper[4730]: I0930 10:26:41.527239 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7t8bk\" (UniqueName: \"kubernetes.io/projected/076a53c1-4f43-4c11-b67a-163d1fe06287-kube-api-access-7t8bk\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-jrhtl\" (UID: \"076a53c1-4f43-4c11-b67a-163d1fe06287\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-jrhtl" Sep 30 10:26:41 crc kubenswrapper[4730]: I0930 10:26:41.689122 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-jrhtl" Sep 30 10:26:42 crc kubenswrapper[4730]: I0930 10:26:42.219306 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-jrhtl"] Sep 30 10:26:42 crc kubenswrapper[4730]: I0930 10:26:42.282052 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-jrhtl" event={"ID":"076a53c1-4f43-4c11-b67a-163d1fe06287","Type":"ContainerStarted","Data":"f5cf565d9dc0f59b4b8a027f98946e5b7c927c63223ad7a6c9507b7e436dfb08"} Sep 30 10:26:43 crc kubenswrapper[4730]: I0930 10:26:43.291290 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-jrhtl" event={"ID":"076a53c1-4f43-4c11-b67a-163d1fe06287","Type":"ContainerStarted","Data":"919bcb9cd5135cf1a51b5746e72ea185696278a152cab20a17dd2a9b910f337b"} Sep 30 10:26:43 crc kubenswrapper[4730]: I0930 10:26:43.317965 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-jrhtl" podStartSLOduration=1.873121748 podStartE2EDuration="2.317942124s" podCreationTimestamp="2025-09-30 10:26:41 +0000 UTC" firstStartedPulling="2025-09-30 10:26:42.220102647 +0000 UTC m=+2246.553362640" lastFinishedPulling="2025-09-30 10:26:42.664923023 +0000 UTC m=+2246.998183016" observedRunningTime="2025-09-30 10:26:43.310190852 +0000 UTC m=+2247.643450845" watchObservedRunningTime="2025-09-30 10:26:43.317942124 +0000 UTC m=+2247.651202117" Sep 30 10:26:44 crc kubenswrapper[4730]: I0930 10:26:44.380816 4730 scope.go:117] "RemoveContainer" containerID="9f6faea1f020203ed696b478f58ffd89607071ce35adf31ee047b8ef2439d347" Sep 30 10:26:44 crc kubenswrapper[4730]: E0930 10:26:44.381306 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 10:26:56 crc kubenswrapper[4730]: I0930 10:26:56.388438 4730 scope.go:117] "RemoveContainer" containerID="9f6faea1f020203ed696b478f58ffd89607071ce35adf31ee047b8ef2439d347" Sep 30 10:26:56 crc kubenswrapper[4730]: E0930 10:26:56.389174 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 10:27:08 crc kubenswrapper[4730]: I0930 10:27:08.497638 4730 generic.go:334] "Generic (PLEG): container finished" podID="076a53c1-4f43-4c11-b67a-163d1fe06287" containerID="919bcb9cd5135cf1a51b5746e72ea185696278a152cab20a17dd2a9b910f337b" exitCode=0 Sep 30 10:27:08 crc kubenswrapper[4730]: I0930 10:27:08.497651 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-jrhtl" 
event={"ID":"076a53c1-4f43-4c11-b67a-163d1fe06287","Type":"ContainerDied","Data":"919bcb9cd5135cf1a51b5746e72ea185696278a152cab20a17dd2a9b910f337b"} Sep 30 10:27:09 crc kubenswrapper[4730]: I0930 10:27:09.915446 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-jrhtl" Sep 30 10:27:10 crc kubenswrapper[4730]: I0930 10:27:10.000641 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/076a53c1-4f43-4c11-b67a-163d1fe06287-inventory\") pod \"076a53c1-4f43-4c11-b67a-163d1fe06287\" (UID: \"076a53c1-4f43-4c11-b67a-163d1fe06287\") " Sep 30 10:27:10 crc kubenswrapper[4730]: I0930 10:27:10.000742 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/076a53c1-4f43-4c11-b67a-163d1fe06287-ssh-key\") pod \"076a53c1-4f43-4c11-b67a-163d1fe06287\" (UID: \"076a53c1-4f43-4c11-b67a-163d1fe06287\") " Sep 30 10:27:10 crc kubenswrapper[4730]: I0930 10:27:10.000764 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/076a53c1-4f43-4c11-b67a-163d1fe06287-ceph\") pod \"076a53c1-4f43-4c11-b67a-163d1fe06287\" (UID: \"076a53c1-4f43-4c11-b67a-163d1fe06287\") " Sep 30 10:27:10 crc kubenswrapper[4730]: I0930 10:27:10.000959 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7t8bk\" (UniqueName: \"kubernetes.io/projected/076a53c1-4f43-4c11-b67a-163d1fe06287-kube-api-access-7t8bk\") pod \"076a53c1-4f43-4c11-b67a-163d1fe06287\" (UID: \"076a53c1-4f43-4c11-b67a-163d1fe06287\") " Sep 30 10:27:10 crc kubenswrapper[4730]: I0930 10:27:10.006200 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/076a53c1-4f43-4c11-b67a-163d1fe06287-ceph" (OuterVolumeSpecName: "ceph") pod "076a53c1-4f43-4c11-b67a-163d1fe06287" (UID: "076a53c1-4f43-4c11-b67a-163d1fe06287"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:27:10 crc kubenswrapper[4730]: I0930 10:27:10.006314 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/076a53c1-4f43-4c11-b67a-163d1fe06287-kube-api-access-7t8bk" (OuterVolumeSpecName: "kube-api-access-7t8bk") pod "076a53c1-4f43-4c11-b67a-163d1fe06287" (UID: "076a53c1-4f43-4c11-b67a-163d1fe06287"). InnerVolumeSpecName "kube-api-access-7t8bk". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:27:10 crc kubenswrapper[4730]: I0930 10:27:10.029953 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/076a53c1-4f43-4c11-b67a-163d1fe06287-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "076a53c1-4f43-4c11-b67a-163d1fe06287" (UID: "076a53c1-4f43-4c11-b67a-163d1fe06287"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:27:10 crc kubenswrapper[4730]: I0930 10:27:10.034493 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/076a53c1-4f43-4c11-b67a-163d1fe06287-inventory" (OuterVolumeSpecName: "inventory") pod "076a53c1-4f43-4c11-b67a-163d1fe06287" (UID: "076a53c1-4f43-4c11-b67a-163d1fe06287"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:27:10 crc kubenswrapper[4730]: I0930 10:27:10.103169 4730 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/076a53c1-4f43-4c11-b67a-163d1fe06287-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 10:27:10 crc kubenswrapper[4730]: I0930 10:27:10.103508 4730 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/076a53c1-4f43-4c11-b67a-163d1fe06287-ceph\") on node \"crc\" DevicePath \"\"" Sep 30 10:27:10 crc kubenswrapper[4730]: I0930 10:27:10.103677 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7t8bk\" (UniqueName: \"kubernetes.io/projected/076a53c1-4f43-4c11-b67a-163d1fe06287-kube-api-access-7t8bk\") on node \"crc\" DevicePath \"\"" Sep 30 10:27:10 crc kubenswrapper[4730]: I0930 10:27:10.103787 4730 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/076a53c1-4f43-4c11-b67a-163d1fe06287-inventory\") on node \"crc\" DevicePath \"\"" Sep 30 10:27:10 crc kubenswrapper[4730]: I0930 10:27:10.381969 4730 scope.go:117] "RemoveContainer" containerID="9f6faea1f020203ed696b478f58ffd89607071ce35adf31ee047b8ef2439d347" Sep 30 10:27:10 crc kubenswrapper[4730]: E0930 10:27:10.382241 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 10:27:10 crc kubenswrapper[4730]: I0930 10:27:10.528032 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-jrhtl" event={"ID":"076a53c1-4f43-4c11-b67a-163d1fe06287","Type":"ContainerDied","Data":"f5cf565d9dc0f59b4b8a027f98946e5b7c927c63223ad7a6c9507b7e436dfb08"} Sep 30 10:27:10 crc kubenswrapper[4730]: I0930 10:27:10.528573 4730 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f5cf565d9dc0f59b4b8a027f98946e5b7c927c63223ad7a6c9507b7e436dfb08" Sep 30 10:27:10 crc kubenswrapper[4730]: I0930 10:27:10.528697 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-jrhtl" Sep 30 10:27:10 crc kubenswrapper[4730]: I0930 10:27:10.596757 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jsrw2"] Sep 30 10:27:10 crc kubenswrapper[4730]: E0930 10:27:10.597344 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="076a53c1-4f43-4c11-b67a-163d1fe06287" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Sep 30 10:27:10 crc kubenswrapper[4730]: I0930 10:27:10.597446 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="076a53c1-4f43-4c11-b67a-163d1fe06287" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Sep 30 10:27:10 crc kubenswrapper[4730]: I0930 10:27:10.597779 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="076a53c1-4f43-4c11-b67a-163d1fe06287" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Sep 30 10:27:10 crc kubenswrapper[4730]: I0930 10:27:10.598667 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jsrw2" Sep 30 10:27:10 crc kubenswrapper[4730]: I0930 10:27:10.601527 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 10:27:10 crc kubenswrapper[4730]: I0930 10:27:10.601654 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Sep 30 10:27:10 crc kubenswrapper[4730]: I0930 10:27:10.602049 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 30 10:27:10 crc kubenswrapper[4730]: I0930 10:27:10.604162 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-8vdlt" Sep 30 10:27:10 crc kubenswrapper[4730]: I0930 10:27:10.604273 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 30 10:27:10 crc kubenswrapper[4730]: I0930 10:27:10.610353 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jsrw2"] Sep 30 10:27:10 crc kubenswrapper[4730]: I0930 10:27:10.722561 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/60081e7d-07fd-48ac-a4ae-46f05ab4d935-ceph\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-jsrw2\" (UID: \"60081e7d-07fd-48ac-a4ae-46f05ab4d935\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jsrw2" Sep 30 10:27:10 crc kubenswrapper[4730]: I0930 10:27:10.722662 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2sdd9\" (UniqueName: \"kubernetes.io/projected/60081e7d-07fd-48ac-a4ae-46f05ab4d935-kube-api-access-2sdd9\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-jsrw2\" (UID: \"60081e7d-07fd-48ac-a4ae-46f05ab4d935\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jsrw2" Sep 30 10:27:10 crc kubenswrapper[4730]: I0930 10:27:10.722923 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/60081e7d-07fd-48ac-a4ae-46f05ab4d935-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-jsrw2\" 
(UID: \"60081e7d-07fd-48ac-a4ae-46f05ab4d935\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jsrw2" Sep 30 10:27:10 crc kubenswrapper[4730]: I0930 10:27:10.723006 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/60081e7d-07fd-48ac-a4ae-46f05ab4d935-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-jsrw2\" (UID: \"60081e7d-07fd-48ac-a4ae-46f05ab4d935\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jsrw2" Sep 30 10:27:10 crc kubenswrapper[4730]: I0930 10:27:10.825438 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/60081e7d-07fd-48ac-a4ae-46f05ab4d935-ceph\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-jsrw2\" (UID: \"60081e7d-07fd-48ac-a4ae-46f05ab4d935\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jsrw2" Sep 30 10:27:10 crc kubenswrapper[4730]: I0930 10:27:10.825508 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2sdd9\" (UniqueName: \"kubernetes.io/projected/60081e7d-07fd-48ac-a4ae-46f05ab4d935-kube-api-access-2sdd9\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-jsrw2\" (UID: \"60081e7d-07fd-48ac-a4ae-46f05ab4d935\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jsrw2" Sep 30 10:27:10 crc kubenswrapper[4730]: I0930 10:27:10.825564 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/60081e7d-07fd-48ac-a4ae-46f05ab4d935-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-jsrw2\" (UID: \"60081e7d-07fd-48ac-a4ae-46f05ab4d935\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jsrw2" Sep 30 10:27:10 crc kubenswrapper[4730]: I0930 10:27:10.825585 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/60081e7d-07fd-48ac-a4ae-46f05ab4d935-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-jsrw2\" (UID: \"60081e7d-07fd-48ac-a4ae-46f05ab4d935\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jsrw2" Sep 30 10:27:10 crc kubenswrapper[4730]: I0930 10:27:10.831075 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/60081e7d-07fd-48ac-a4ae-46f05ab4d935-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-jsrw2\" (UID: \"60081e7d-07fd-48ac-a4ae-46f05ab4d935\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jsrw2" Sep 30 10:27:10 crc kubenswrapper[4730]: I0930 10:27:10.832258 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/60081e7d-07fd-48ac-a4ae-46f05ab4d935-ceph\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-jsrw2\" (UID: \"60081e7d-07fd-48ac-a4ae-46f05ab4d935\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jsrw2" Sep 30 10:27:10 crc kubenswrapper[4730]: I0930 10:27:10.833109 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/60081e7d-07fd-48ac-a4ae-46f05ab4d935-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-jsrw2\" (UID: \"60081e7d-07fd-48ac-a4ae-46f05ab4d935\") " 
pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jsrw2" Sep 30 10:27:10 crc kubenswrapper[4730]: I0930 10:27:10.847820 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2sdd9\" (UniqueName: \"kubernetes.io/projected/60081e7d-07fd-48ac-a4ae-46f05ab4d935-kube-api-access-2sdd9\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-jsrw2\" (UID: \"60081e7d-07fd-48ac-a4ae-46f05ab4d935\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jsrw2" Sep 30 10:27:10 crc kubenswrapper[4730]: I0930 10:27:10.918972 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jsrw2" Sep 30 10:27:11 crc kubenswrapper[4730]: I0930 10:27:11.418107 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jsrw2"] Sep 30 10:27:11 crc kubenswrapper[4730]: I0930 10:27:11.539174 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jsrw2" event={"ID":"60081e7d-07fd-48ac-a4ae-46f05ab4d935","Type":"ContainerStarted","Data":"ebea83e8e52d208f06c0361dcf5ff35a971f27e6fba5c440cd3a63734b6ecfef"} Sep 30 10:27:12 crc kubenswrapper[4730]: I0930 10:27:12.547439 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jsrw2" event={"ID":"60081e7d-07fd-48ac-a4ae-46f05ab4d935","Type":"ContainerStarted","Data":"a3bac59b8ccb41f9782a1cb143dc5ac4f628474c7ab0205a7c129e79f5f0f42e"} Sep 30 10:27:12 crc kubenswrapper[4730]: I0930 10:27:12.571124 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jsrw2" podStartSLOduration=2.044075756 podStartE2EDuration="2.571103619s" podCreationTimestamp="2025-09-30 10:27:10 +0000 UTC" firstStartedPulling="2025-09-30 10:27:11.430370477 +0000 UTC m=+2275.763630480" lastFinishedPulling="2025-09-30 10:27:11.95739835 +0000 UTC m=+2276.290658343" observedRunningTime="2025-09-30 10:27:12.562280589 +0000 UTC m=+2276.895540602" watchObservedRunningTime="2025-09-30 10:27:12.571103619 +0000 UTC m=+2276.904363612" Sep 30 10:27:17 crc kubenswrapper[4730]: I0930 10:27:17.585567 4730 generic.go:334] "Generic (PLEG): container finished" podID="60081e7d-07fd-48ac-a4ae-46f05ab4d935" containerID="a3bac59b8ccb41f9782a1cb143dc5ac4f628474c7ab0205a7c129e79f5f0f42e" exitCode=0 Sep 30 10:27:17 crc kubenswrapper[4730]: I0930 10:27:17.585648 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jsrw2" event={"ID":"60081e7d-07fd-48ac-a4ae-46f05ab4d935","Type":"ContainerDied","Data":"a3bac59b8ccb41f9782a1cb143dc5ac4f628474c7ab0205a7c129e79f5f0f42e"} Sep 30 10:27:18 crc kubenswrapper[4730]: I0930 10:27:18.990144 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jsrw2" Sep 30 10:27:19 crc kubenswrapper[4730]: I0930 10:27:19.088911 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/60081e7d-07fd-48ac-a4ae-46f05ab4d935-inventory\") pod \"60081e7d-07fd-48ac-a4ae-46f05ab4d935\" (UID: \"60081e7d-07fd-48ac-a4ae-46f05ab4d935\") " Sep 30 10:27:19 crc kubenswrapper[4730]: I0930 10:27:19.088991 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/60081e7d-07fd-48ac-a4ae-46f05ab4d935-ssh-key\") pod \"60081e7d-07fd-48ac-a4ae-46f05ab4d935\" (UID: \"60081e7d-07fd-48ac-a4ae-46f05ab4d935\") " Sep 30 10:27:19 crc kubenswrapper[4730]: I0930 10:27:19.089067 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2sdd9\" (UniqueName: \"kubernetes.io/projected/60081e7d-07fd-48ac-a4ae-46f05ab4d935-kube-api-access-2sdd9\") pod \"60081e7d-07fd-48ac-a4ae-46f05ab4d935\" (UID: \"60081e7d-07fd-48ac-a4ae-46f05ab4d935\") " Sep 30 10:27:19 crc kubenswrapper[4730]: I0930 10:27:19.089096 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/60081e7d-07fd-48ac-a4ae-46f05ab4d935-ceph\") pod \"60081e7d-07fd-48ac-a4ae-46f05ab4d935\" (UID: \"60081e7d-07fd-48ac-a4ae-46f05ab4d935\") " Sep 30 10:27:19 crc kubenswrapper[4730]: I0930 10:27:19.095227 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/60081e7d-07fd-48ac-a4ae-46f05ab4d935-ceph" (OuterVolumeSpecName: "ceph") pod "60081e7d-07fd-48ac-a4ae-46f05ab4d935" (UID: "60081e7d-07fd-48ac-a4ae-46f05ab4d935"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:27:19 crc kubenswrapper[4730]: I0930 10:27:19.095714 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/60081e7d-07fd-48ac-a4ae-46f05ab4d935-kube-api-access-2sdd9" (OuterVolumeSpecName: "kube-api-access-2sdd9") pod "60081e7d-07fd-48ac-a4ae-46f05ab4d935" (UID: "60081e7d-07fd-48ac-a4ae-46f05ab4d935"). InnerVolumeSpecName "kube-api-access-2sdd9". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:27:19 crc kubenswrapper[4730]: I0930 10:27:19.122467 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/60081e7d-07fd-48ac-a4ae-46f05ab4d935-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "60081e7d-07fd-48ac-a4ae-46f05ab4d935" (UID: "60081e7d-07fd-48ac-a4ae-46f05ab4d935"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:27:19 crc kubenswrapper[4730]: I0930 10:27:19.125815 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/60081e7d-07fd-48ac-a4ae-46f05ab4d935-inventory" (OuterVolumeSpecName: "inventory") pod "60081e7d-07fd-48ac-a4ae-46f05ab4d935" (UID: "60081e7d-07fd-48ac-a4ae-46f05ab4d935"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:27:19 crc kubenswrapper[4730]: I0930 10:27:19.192239 4730 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/60081e7d-07fd-48ac-a4ae-46f05ab4d935-inventory\") on node \"crc\" DevicePath \"\"" Sep 30 10:27:19 crc kubenswrapper[4730]: I0930 10:27:19.192388 4730 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/60081e7d-07fd-48ac-a4ae-46f05ab4d935-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 10:27:19 crc kubenswrapper[4730]: I0930 10:27:19.192470 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2sdd9\" (UniqueName: \"kubernetes.io/projected/60081e7d-07fd-48ac-a4ae-46f05ab4d935-kube-api-access-2sdd9\") on node \"crc\" DevicePath \"\"" Sep 30 10:27:19 crc kubenswrapper[4730]: I0930 10:27:19.192537 4730 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/60081e7d-07fd-48ac-a4ae-46f05ab4d935-ceph\") on node \"crc\" DevicePath \"\"" Sep 30 10:27:19 crc kubenswrapper[4730]: I0930 10:27:19.603897 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jsrw2" event={"ID":"60081e7d-07fd-48ac-a4ae-46f05ab4d935","Type":"ContainerDied","Data":"ebea83e8e52d208f06c0361dcf5ff35a971f27e6fba5c440cd3a63734b6ecfef"} Sep 30 10:27:19 crc kubenswrapper[4730]: I0930 10:27:19.604207 4730 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ebea83e8e52d208f06c0361dcf5ff35a971f27e6fba5c440cd3a63734b6ecfef" Sep 30 10:27:19 crc kubenswrapper[4730]: I0930 10:27:19.603996 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-jsrw2" Sep 30 10:27:19 crc kubenswrapper[4730]: I0930 10:27:19.673498 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-xhgzq"] Sep 30 10:27:19 crc kubenswrapper[4730]: E0930 10:27:19.674149 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="60081e7d-07fd-48ac-a4ae-46f05ab4d935" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Sep 30 10:27:19 crc kubenswrapper[4730]: I0930 10:27:19.674233 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="60081e7d-07fd-48ac-a4ae-46f05ab4d935" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Sep 30 10:27:19 crc kubenswrapper[4730]: I0930 10:27:19.674539 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="60081e7d-07fd-48ac-a4ae-46f05ab4d935" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Sep 30 10:27:19 crc kubenswrapper[4730]: I0930 10:27:19.675312 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-xhgzq" Sep 30 10:27:19 crc kubenswrapper[4730]: I0930 10:27:19.677679 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 30 10:27:19 crc kubenswrapper[4730]: I0930 10:27:19.678035 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 10:27:19 crc kubenswrapper[4730]: I0930 10:27:19.678398 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 30 10:27:19 crc kubenswrapper[4730]: I0930 10:27:19.678708 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Sep 30 10:27:19 crc kubenswrapper[4730]: I0930 10:27:19.680760 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-8vdlt" Sep 30 10:27:19 crc kubenswrapper[4730]: I0930 10:27:19.686587 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-xhgzq"] Sep 30 10:27:19 crc kubenswrapper[4730]: I0930 10:27:19.805834 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9e488e17-15bf-414e-b0cb-e5b3dbf22769-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-xhgzq\" (UID: \"9e488e17-15bf-414e-b0cb-e5b3dbf22769\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-xhgzq" Sep 30 10:27:19 crc kubenswrapper[4730]: I0930 10:27:19.805961 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d2twb\" (UniqueName: \"kubernetes.io/projected/9e488e17-15bf-414e-b0cb-e5b3dbf22769-kube-api-access-d2twb\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-xhgzq\" (UID: \"9e488e17-15bf-414e-b0cb-e5b3dbf22769\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-xhgzq" Sep 30 10:27:19 crc kubenswrapper[4730]: I0930 10:27:19.806163 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9e488e17-15bf-414e-b0cb-e5b3dbf22769-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-xhgzq\" (UID: \"9e488e17-15bf-414e-b0cb-e5b3dbf22769\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-xhgzq" Sep 30 10:27:19 crc kubenswrapper[4730]: I0930 10:27:19.806226 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/9e488e17-15bf-414e-b0cb-e5b3dbf22769-ceph\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-xhgzq\" (UID: \"9e488e17-15bf-414e-b0cb-e5b3dbf22769\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-xhgzq" Sep 30 10:27:19 crc kubenswrapper[4730]: I0930 10:27:19.907452 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9e488e17-15bf-414e-b0cb-e5b3dbf22769-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-xhgzq\" (UID: \"9e488e17-15bf-414e-b0cb-e5b3dbf22769\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-xhgzq" Sep 30 10:27:19 crc kubenswrapper[4730]: I0930 10:27:19.907525 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: 
\"kubernetes.io/secret/9e488e17-15bf-414e-b0cb-e5b3dbf22769-ceph\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-xhgzq\" (UID: \"9e488e17-15bf-414e-b0cb-e5b3dbf22769\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-xhgzq" Sep 30 10:27:19 crc kubenswrapper[4730]: I0930 10:27:19.907691 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9e488e17-15bf-414e-b0cb-e5b3dbf22769-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-xhgzq\" (UID: \"9e488e17-15bf-414e-b0cb-e5b3dbf22769\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-xhgzq" Sep 30 10:27:19 crc kubenswrapper[4730]: I0930 10:27:19.907757 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d2twb\" (UniqueName: \"kubernetes.io/projected/9e488e17-15bf-414e-b0cb-e5b3dbf22769-kube-api-access-d2twb\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-xhgzq\" (UID: \"9e488e17-15bf-414e-b0cb-e5b3dbf22769\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-xhgzq" Sep 30 10:27:19 crc kubenswrapper[4730]: I0930 10:27:19.912185 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/9e488e17-15bf-414e-b0cb-e5b3dbf22769-ceph\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-xhgzq\" (UID: \"9e488e17-15bf-414e-b0cb-e5b3dbf22769\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-xhgzq" Sep 30 10:27:19 crc kubenswrapper[4730]: I0930 10:27:19.912872 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9e488e17-15bf-414e-b0cb-e5b3dbf22769-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-xhgzq\" (UID: \"9e488e17-15bf-414e-b0cb-e5b3dbf22769\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-xhgzq" Sep 30 10:27:19 crc kubenswrapper[4730]: I0930 10:27:19.913002 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9e488e17-15bf-414e-b0cb-e5b3dbf22769-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-xhgzq\" (UID: \"9e488e17-15bf-414e-b0cb-e5b3dbf22769\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-xhgzq" Sep 30 10:27:19 crc kubenswrapper[4730]: I0930 10:27:19.922912 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d2twb\" (UniqueName: \"kubernetes.io/projected/9e488e17-15bf-414e-b0cb-e5b3dbf22769-kube-api-access-d2twb\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-xhgzq\" (UID: \"9e488e17-15bf-414e-b0cb-e5b3dbf22769\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-xhgzq" Sep 30 10:27:19 crc kubenswrapper[4730]: I0930 10:27:19.993160 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-xhgzq" Sep 30 10:27:20 crc kubenswrapper[4730]: I0930 10:27:20.486189 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-xhgzq"] Sep 30 10:27:20 crc kubenswrapper[4730]: I0930 10:27:20.611840 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-xhgzq" event={"ID":"9e488e17-15bf-414e-b0cb-e5b3dbf22769","Type":"ContainerStarted","Data":"7e45b022e442a510c494470f43ac209564ae7a0758db048bdd5ae37dd271e34a"} Sep 30 10:27:21 crc kubenswrapper[4730]: I0930 10:27:21.621640 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-xhgzq" event={"ID":"9e488e17-15bf-414e-b0cb-e5b3dbf22769","Type":"ContainerStarted","Data":"a57f2d11265a11bbda4217c0f36937cc6129a16b0a5ff264134780a045f3e0c0"} Sep 30 10:27:21 crc kubenswrapper[4730]: I0930 10:27:21.642474 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-xhgzq" podStartSLOduration=1.947890146 podStartE2EDuration="2.642456216s" podCreationTimestamp="2025-09-30 10:27:19 +0000 UTC" firstStartedPulling="2025-09-30 10:27:20.495377249 +0000 UTC m=+2284.828637232" lastFinishedPulling="2025-09-30 10:27:21.189943309 +0000 UTC m=+2285.523203302" observedRunningTime="2025-09-30 10:27:21.634861909 +0000 UTC m=+2285.968121912" watchObservedRunningTime="2025-09-30 10:27:21.642456216 +0000 UTC m=+2285.975716209" Sep 30 10:27:22 crc kubenswrapper[4730]: I0930 10:27:22.381016 4730 scope.go:117] "RemoveContainer" containerID="9f6faea1f020203ed696b478f58ffd89607071ce35adf31ee047b8ef2439d347" Sep 30 10:27:22 crc kubenswrapper[4730]: E0930 10:27:22.381281 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 10:27:34 crc kubenswrapper[4730]: I0930 10:27:34.381192 4730 scope.go:117] "RemoveContainer" containerID="9f6faea1f020203ed696b478f58ffd89607071ce35adf31ee047b8ef2439d347" Sep 30 10:27:34 crc kubenswrapper[4730]: E0930 10:27:34.381912 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 10:27:48 crc kubenswrapper[4730]: I0930 10:27:48.381511 4730 scope.go:117] "RemoveContainer" containerID="9f6faea1f020203ed696b478f58ffd89607071ce35adf31ee047b8ef2439d347" Sep 30 10:27:48 crc kubenswrapper[4730]: E0930 10:27:48.382457 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 10:27:56 crc kubenswrapper[4730]: I0930 10:27:56.958878 4730 generic.go:334] "Generic (PLEG): container finished" podID="9e488e17-15bf-414e-b0cb-e5b3dbf22769" containerID="a57f2d11265a11bbda4217c0f36937cc6129a16b0a5ff264134780a045f3e0c0" exitCode=0 Sep 30 10:27:56 crc kubenswrapper[4730]: I0930 10:27:56.959080 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-xhgzq" event={"ID":"9e488e17-15bf-414e-b0cb-e5b3dbf22769","Type":"ContainerDied","Data":"a57f2d11265a11bbda4217c0f36937cc6129a16b0a5ff264134780a045f3e0c0"} Sep 30 10:27:58 crc kubenswrapper[4730]: I0930 10:27:58.395796 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-xhgzq" Sep 30 10:27:58 crc kubenswrapper[4730]: I0930 10:27:58.514157 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d2twb\" (UniqueName: \"kubernetes.io/projected/9e488e17-15bf-414e-b0cb-e5b3dbf22769-kube-api-access-d2twb\") pod \"9e488e17-15bf-414e-b0cb-e5b3dbf22769\" (UID: \"9e488e17-15bf-414e-b0cb-e5b3dbf22769\") " Sep 30 10:27:58 crc kubenswrapper[4730]: I0930 10:27:58.514360 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/9e488e17-15bf-414e-b0cb-e5b3dbf22769-ceph\") pod \"9e488e17-15bf-414e-b0cb-e5b3dbf22769\" (UID: \"9e488e17-15bf-414e-b0cb-e5b3dbf22769\") " Sep 30 10:27:58 crc kubenswrapper[4730]: I0930 10:27:58.514466 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9e488e17-15bf-414e-b0cb-e5b3dbf22769-inventory\") pod \"9e488e17-15bf-414e-b0cb-e5b3dbf22769\" (UID: \"9e488e17-15bf-414e-b0cb-e5b3dbf22769\") " Sep 30 10:27:58 crc kubenswrapper[4730]: I0930 10:27:58.514514 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9e488e17-15bf-414e-b0cb-e5b3dbf22769-ssh-key\") pod \"9e488e17-15bf-414e-b0cb-e5b3dbf22769\" (UID: \"9e488e17-15bf-414e-b0cb-e5b3dbf22769\") " Sep 30 10:27:58 crc kubenswrapper[4730]: I0930 10:27:58.521633 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9e488e17-15bf-414e-b0cb-e5b3dbf22769-ceph" (OuterVolumeSpecName: "ceph") pod "9e488e17-15bf-414e-b0cb-e5b3dbf22769" (UID: "9e488e17-15bf-414e-b0cb-e5b3dbf22769"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:27:58 crc kubenswrapper[4730]: I0930 10:27:58.522493 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9e488e17-15bf-414e-b0cb-e5b3dbf22769-kube-api-access-d2twb" (OuterVolumeSpecName: "kube-api-access-d2twb") pod "9e488e17-15bf-414e-b0cb-e5b3dbf22769" (UID: "9e488e17-15bf-414e-b0cb-e5b3dbf22769"). InnerVolumeSpecName "kube-api-access-d2twb". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:27:58 crc kubenswrapper[4730]: I0930 10:27:58.548139 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9e488e17-15bf-414e-b0cb-e5b3dbf22769-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "9e488e17-15bf-414e-b0cb-e5b3dbf22769" (UID: "9e488e17-15bf-414e-b0cb-e5b3dbf22769"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:27:58 crc kubenswrapper[4730]: I0930 10:27:58.550848 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9e488e17-15bf-414e-b0cb-e5b3dbf22769-inventory" (OuterVolumeSpecName: "inventory") pod "9e488e17-15bf-414e-b0cb-e5b3dbf22769" (UID: "9e488e17-15bf-414e-b0cb-e5b3dbf22769"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:27:58 crc kubenswrapper[4730]: I0930 10:27:58.616463 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d2twb\" (UniqueName: \"kubernetes.io/projected/9e488e17-15bf-414e-b0cb-e5b3dbf22769-kube-api-access-d2twb\") on node \"crc\" DevicePath \"\"" Sep 30 10:27:58 crc kubenswrapper[4730]: I0930 10:27:58.616500 4730 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/9e488e17-15bf-414e-b0cb-e5b3dbf22769-ceph\") on node \"crc\" DevicePath \"\"" Sep 30 10:27:58 crc kubenswrapper[4730]: I0930 10:27:58.616509 4730 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9e488e17-15bf-414e-b0cb-e5b3dbf22769-inventory\") on node \"crc\" DevicePath \"\"" Sep 30 10:27:58 crc kubenswrapper[4730]: I0930 10:27:58.616518 4730 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9e488e17-15bf-414e-b0cb-e5b3dbf22769-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 10:27:58 crc kubenswrapper[4730]: I0930 10:27:58.978440 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-xhgzq" event={"ID":"9e488e17-15bf-414e-b0cb-e5b3dbf22769","Type":"ContainerDied","Data":"7e45b022e442a510c494470f43ac209564ae7a0758db048bdd5ae37dd271e34a"} Sep 30 10:27:58 crc kubenswrapper[4730]: I0930 10:27:58.978501 4730 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7e45b022e442a510c494470f43ac209564ae7a0758db048bdd5ae37dd271e34a" Sep 30 10:27:58 crc kubenswrapper[4730]: I0930 10:27:58.978708 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-xhgzq" Sep 30 10:27:59 crc kubenswrapper[4730]: I0930 10:27:59.060351 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-cwj6s"] Sep 30 10:27:59 crc kubenswrapper[4730]: E0930 10:27:59.060817 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9e488e17-15bf-414e-b0cb-e5b3dbf22769" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Sep 30 10:27:59 crc kubenswrapper[4730]: I0930 10:27:59.060839 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="9e488e17-15bf-414e-b0cb-e5b3dbf22769" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Sep 30 10:27:59 crc kubenswrapper[4730]: I0930 10:27:59.061036 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="9e488e17-15bf-414e-b0cb-e5b3dbf22769" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Sep 30 10:27:59 crc kubenswrapper[4730]: I0930 10:27:59.061730 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-cwj6s" Sep 30 10:27:59 crc kubenswrapper[4730]: I0930 10:27:59.065738 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 10:27:59 crc kubenswrapper[4730]: I0930 10:27:59.066207 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 30 10:27:59 crc kubenswrapper[4730]: I0930 10:27:59.066407 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-8vdlt" Sep 30 10:27:59 crc kubenswrapper[4730]: I0930 10:27:59.066558 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Sep 30 10:27:59 crc kubenswrapper[4730]: I0930 10:27:59.067339 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 30 10:27:59 crc kubenswrapper[4730]: I0930 10:27:59.073394 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-cwj6s"] Sep 30 10:27:59 crc kubenswrapper[4730]: I0930 10:27:59.127047 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/15b0b939-744d-4ea8-8ff3-942843d32348-inventory\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-cwj6s\" (UID: \"15b0b939-744d-4ea8-8ff3-942843d32348\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-cwj6s" Sep 30 10:27:59 crc kubenswrapper[4730]: I0930 10:27:59.127222 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/15b0b939-744d-4ea8-8ff3-942843d32348-ceph\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-cwj6s\" (UID: \"15b0b939-744d-4ea8-8ff3-942843d32348\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-cwj6s" Sep 30 10:27:59 crc kubenswrapper[4730]: I0930 10:27:59.127281 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/15b0b939-744d-4ea8-8ff3-942843d32348-ssh-key\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-cwj6s\" (UID: \"15b0b939-744d-4ea8-8ff3-942843d32348\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-cwj6s" Sep 30 10:27:59 crc kubenswrapper[4730]: I0930 10:27:59.127322 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5s4md\" (UniqueName: \"kubernetes.io/projected/15b0b939-744d-4ea8-8ff3-942843d32348-kube-api-access-5s4md\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-cwj6s\" (UID: \"15b0b939-744d-4ea8-8ff3-942843d32348\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-cwj6s" Sep 30 10:27:59 crc kubenswrapper[4730]: I0930 10:27:59.228976 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/15b0b939-744d-4ea8-8ff3-942843d32348-inventory\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-cwj6s\" (UID: \"15b0b939-744d-4ea8-8ff3-942843d32348\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-cwj6s" Sep 30 10:27:59 crc kubenswrapper[4730]: I0930 10:27:59.229059 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" 
(UniqueName: \"kubernetes.io/secret/15b0b939-744d-4ea8-8ff3-942843d32348-ceph\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-cwj6s\" (UID: \"15b0b939-744d-4ea8-8ff3-942843d32348\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-cwj6s" Sep 30 10:27:59 crc kubenswrapper[4730]: I0930 10:27:59.229091 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/15b0b939-744d-4ea8-8ff3-942843d32348-ssh-key\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-cwj6s\" (UID: \"15b0b939-744d-4ea8-8ff3-942843d32348\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-cwj6s" Sep 30 10:27:59 crc kubenswrapper[4730]: I0930 10:27:59.229126 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5s4md\" (UniqueName: \"kubernetes.io/projected/15b0b939-744d-4ea8-8ff3-942843d32348-kube-api-access-5s4md\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-cwj6s\" (UID: \"15b0b939-744d-4ea8-8ff3-942843d32348\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-cwj6s" Sep 30 10:27:59 crc kubenswrapper[4730]: I0930 10:27:59.234308 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/15b0b939-744d-4ea8-8ff3-942843d32348-ssh-key\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-cwj6s\" (UID: \"15b0b939-744d-4ea8-8ff3-942843d32348\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-cwj6s" Sep 30 10:27:59 crc kubenswrapper[4730]: I0930 10:27:59.234308 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/15b0b939-744d-4ea8-8ff3-942843d32348-inventory\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-cwj6s\" (UID: \"15b0b939-744d-4ea8-8ff3-942843d32348\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-cwj6s" Sep 30 10:27:59 crc kubenswrapper[4730]: I0930 10:27:59.234594 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/15b0b939-744d-4ea8-8ff3-942843d32348-ceph\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-cwj6s\" (UID: \"15b0b939-744d-4ea8-8ff3-942843d32348\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-cwj6s" Sep 30 10:27:59 crc kubenswrapper[4730]: I0930 10:27:59.247603 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5s4md\" (UniqueName: \"kubernetes.io/projected/15b0b939-744d-4ea8-8ff3-942843d32348-kube-api-access-5s4md\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-cwj6s\" (UID: \"15b0b939-744d-4ea8-8ff3-942843d32348\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-cwj6s" Sep 30 10:27:59 crc kubenswrapper[4730]: I0930 10:27:59.380632 4730 scope.go:117] "RemoveContainer" containerID="9f6faea1f020203ed696b478f58ffd89607071ce35adf31ee047b8ef2439d347" Sep 30 10:27:59 crc kubenswrapper[4730]: E0930 10:27:59.381195 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 10:27:59 crc kubenswrapper[4730]: 
I0930 10:27:59.396484 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-cwj6s" Sep 30 10:27:59 crc kubenswrapper[4730]: I0930 10:27:59.951771 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-cwj6s"] Sep 30 10:27:59 crc kubenswrapper[4730]: I0930 10:27:59.988887 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-cwj6s" event={"ID":"15b0b939-744d-4ea8-8ff3-942843d32348","Type":"ContainerStarted","Data":"c56afa800cd6af7474ee4669c6e2c4a9d568b259cecb681bc2059fd0f8b808f3"} Sep 30 10:28:00 crc kubenswrapper[4730]: I0930 10:28:00.998504 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-cwj6s" event={"ID":"15b0b939-744d-4ea8-8ff3-942843d32348","Type":"ContainerStarted","Data":"2b83b3e498803058cd825574d35c4b5684c97e39ca4098801c8662b3e5d166a1"} Sep 30 10:28:01 crc kubenswrapper[4730]: I0930 10:28:01.022263 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-cwj6s" podStartSLOduration=1.457232938 podStartE2EDuration="2.022241499s" podCreationTimestamp="2025-09-30 10:27:59 +0000 UTC" firstStartedPulling="2025-09-30 10:27:59.959348531 +0000 UTC m=+2324.292608524" lastFinishedPulling="2025-09-30 10:28:00.524357092 +0000 UTC m=+2324.857617085" observedRunningTime="2025-09-30 10:28:01.015027991 +0000 UTC m=+2325.348287984" watchObservedRunningTime="2025-09-30 10:28:01.022241499 +0000 UTC m=+2325.355501492" Sep 30 10:28:05 crc kubenswrapper[4730]: I0930 10:28:05.033065 4730 generic.go:334] "Generic (PLEG): container finished" podID="15b0b939-744d-4ea8-8ff3-942843d32348" containerID="2b83b3e498803058cd825574d35c4b5684c97e39ca4098801c8662b3e5d166a1" exitCode=0 Sep 30 10:28:05 crc kubenswrapper[4730]: I0930 10:28:05.033763 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-cwj6s" event={"ID":"15b0b939-744d-4ea8-8ff3-942843d32348","Type":"ContainerDied","Data":"2b83b3e498803058cd825574d35c4b5684c97e39ca4098801c8662b3e5d166a1"} Sep 30 10:28:06 crc kubenswrapper[4730]: I0930 10:28:06.441184 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-cwj6s" Sep 30 10:28:06 crc kubenswrapper[4730]: I0930 10:28:06.612299 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5s4md\" (UniqueName: \"kubernetes.io/projected/15b0b939-744d-4ea8-8ff3-942843d32348-kube-api-access-5s4md\") pod \"15b0b939-744d-4ea8-8ff3-942843d32348\" (UID: \"15b0b939-744d-4ea8-8ff3-942843d32348\") " Sep 30 10:28:06 crc kubenswrapper[4730]: I0930 10:28:06.612374 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/15b0b939-744d-4ea8-8ff3-942843d32348-ssh-key\") pod \"15b0b939-744d-4ea8-8ff3-942843d32348\" (UID: \"15b0b939-744d-4ea8-8ff3-942843d32348\") " Sep 30 10:28:06 crc kubenswrapper[4730]: I0930 10:28:06.612542 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/15b0b939-744d-4ea8-8ff3-942843d32348-ceph\") pod \"15b0b939-744d-4ea8-8ff3-942843d32348\" (UID: \"15b0b939-744d-4ea8-8ff3-942843d32348\") " Sep 30 10:28:06 crc kubenswrapper[4730]: I0930 10:28:06.612594 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/15b0b939-744d-4ea8-8ff3-942843d32348-inventory\") pod \"15b0b939-744d-4ea8-8ff3-942843d32348\" (UID: \"15b0b939-744d-4ea8-8ff3-942843d32348\") " Sep 30 10:28:06 crc kubenswrapper[4730]: I0930 10:28:06.620852 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/15b0b939-744d-4ea8-8ff3-942843d32348-ceph" (OuterVolumeSpecName: "ceph") pod "15b0b939-744d-4ea8-8ff3-942843d32348" (UID: "15b0b939-744d-4ea8-8ff3-942843d32348"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:28:06 crc kubenswrapper[4730]: I0930 10:28:06.622072 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/15b0b939-744d-4ea8-8ff3-942843d32348-kube-api-access-5s4md" (OuterVolumeSpecName: "kube-api-access-5s4md") pod "15b0b939-744d-4ea8-8ff3-942843d32348" (UID: "15b0b939-744d-4ea8-8ff3-942843d32348"). InnerVolumeSpecName "kube-api-access-5s4md". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:28:06 crc kubenswrapper[4730]: I0930 10:28:06.640375 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/15b0b939-744d-4ea8-8ff3-942843d32348-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "15b0b939-744d-4ea8-8ff3-942843d32348" (UID: "15b0b939-744d-4ea8-8ff3-942843d32348"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:28:06 crc kubenswrapper[4730]: I0930 10:28:06.640799 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/15b0b939-744d-4ea8-8ff3-942843d32348-inventory" (OuterVolumeSpecName: "inventory") pod "15b0b939-744d-4ea8-8ff3-942843d32348" (UID: "15b0b939-744d-4ea8-8ff3-942843d32348"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:28:06 crc kubenswrapper[4730]: I0930 10:28:06.715148 4730 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/15b0b939-744d-4ea8-8ff3-942843d32348-ceph\") on node \"crc\" DevicePath \"\"" Sep 30 10:28:06 crc kubenswrapper[4730]: I0930 10:28:06.715468 4730 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/15b0b939-744d-4ea8-8ff3-942843d32348-inventory\") on node \"crc\" DevicePath \"\"" Sep 30 10:28:06 crc kubenswrapper[4730]: I0930 10:28:06.715485 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5s4md\" (UniqueName: \"kubernetes.io/projected/15b0b939-744d-4ea8-8ff3-942843d32348-kube-api-access-5s4md\") on node \"crc\" DevicePath \"\"" Sep 30 10:28:06 crc kubenswrapper[4730]: I0930 10:28:06.715496 4730 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/15b0b939-744d-4ea8-8ff3-942843d32348-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 10:28:07 crc kubenswrapper[4730]: I0930 10:28:07.053927 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-cwj6s" event={"ID":"15b0b939-744d-4ea8-8ff3-942843d32348","Type":"ContainerDied","Data":"c56afa800cd6af7474ee4669c6e2c4a9d568b259cecb681bc2059fd0f8b808f3"} Sep 30 10:28:07 crc kubenswrapper[4730]: I0930 10:28:07.053962 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-cwj6s" Sep 30 10:28:07 crc kubenswrapper[4730]: I0930 10:28:07.053974 4730 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c56afa800cd6af7474ee4669c6e2c4a9d568b259cecb681bc2059fd0f8b808f3" Sep 30 10:28:07 crc kubenswrapper[4730]: I0930 10:28:07.122353 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-mtwx8"] Sep 30 10:28:07 crc kubenswrapper[4730]: E0930 10:28:07.122743 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="15b0b939-744d-4ea8-8ff3-942843d32348" containerName="ceph-hci-pre-edpm-deployment-openstack-edpm-ipam" Sep 30 10:28:07 crc kubenswrapper[4730]: I0930 10:28:07.122760 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="15b0b939-744d-4ea8-8ff3-942843d32348" containerName="ceph-hci-pre-edpm-deployment-openstack-edpm-ipam" Sep 30 10:28:07 crc kubenswrapper[4730]: I0930 10:28:07.122940 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="15b0b939-744d-4ea8-8ff3-942843d32348" containerName="ceph-hci-pre-edpm-deployment-openstack-edpm-ipam" Sep 30 10:28:07 crc kubenswrapper[4730]: I0930 10:28:07.123560 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-mtwx8" Sep 30 10:28:07 crc kubenswrapper[4730]: I0930 10:28:07.125299 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-8vdlt" Sep 30 10:28:07 crc kubenswrapper[4730]: I0930 10:28:07.126475 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 10:28:07 crc kubenswrapper[4730]: I0930 10:28:07.127521 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 30 10:28:07 crc kubenswrapper[4730]: I0930 10:28:07.127907 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 30 10:28:07 crc kubenswrapper[4730]: I0930 10:28:07.130729 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Sep 30 10:28:07 crc kubenswrapper[4730]: I0930 10:28:07.142599 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-mtwx8"] Sep 30 10:28:07 crc kubenswrapper[4730]: I0930 10:28:07.326366 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pc95w\" (UniqueName: \"kubernetes.io/projected/64074625-ee39-4163-afbf-bc8e220b63e7-kube-api-access-pc95w\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-mtwx8\" (UID: \"64074625-ee39-4163-afbf-bc8e220b63e7\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-mtwx8" Sep 30 10:28:07 crc kubenswrapper[4730]: I0930 10:28:07.326453 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/64074625-ee39-4163-afbf-bc8e220b63e7-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-mtwx8\" (UID: \"64074625-ee39-4163-afbf-bc8e220b63e7\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-mtwx8" Sep 30 10:28:07 crc kubenswrapper[4730]: I0930 10:28:07.326507 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/64074625-ee39-4163-afbf-bc8e220b63e7-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-mtwx8\" (UID: \"64074625-ee39-4163-afbf-bc8e220b63e7\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-mtwx8" Sep 30 10:28:07 crc kubenswrapper[4730]: I0930 10:28:07.326545 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/64074625-ee39-4163-afbf-bc8e220b63e7-ceph\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-mtwx8\" (UID: \"64074625-ee39-4163-afbf-bc8e220b63e7\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-mtwx8" Sep 30 10:28:07 crc kubenswrapper[4730]: I0930 10:28:07.427628 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/64074625-ee39-4163-afbf-bc8e220b63e7-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-mtwx8\" (UID: \"64074625-ee39-4163-afbf-bc8e220b63e7\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-mtwx8" Sep 30 10:28:07 crc kubenswrapper[4730]: I0930 10:28:07.427683 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" 
(UniqueName: \"kubernetes.io/secret/64074625-ee39-4163-afbf-bc8e220b63e7-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-mtwx8\" (UID: \"64074625-ee39-4163-afbf-bc8e220b63e7\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-mtwx8" Sep 30 10:28:07 crc kubenswrapper[4730]: I0930 10:28:07.427721 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/64074625-ee39-4163-afbf-bc8e220b63e7-ceph\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-mtwx8\" (UID: \"64074625-ee39-4163-afbf-bc8e220b63e7\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-mtwx8" Sep 30 10:28:07 crc kubenswrapper[4730]: I0930 10:28:07.427800 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pc95w\" (UniqueName: \"kubernetes.io/projected/64074625-ee39-4163-afbf-bc8e220b63e7-kube-api-access-pc95w\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-mtwx8\" (UID: \"64074625-ee39-4163-afbf-bc8e220b63e7\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-mtwx8" Sep 30 10:28:07 crc kubenswrapper[4730]: I0930 10:28:07.434331 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/64074625-ee39-4163-afbf-bc8e220b63e7-ceph\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-mtwx8\" (UID: \"64074625-ee39-4163-afbf-bc8e220b63e7\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-mtwx8" Sep 30 10:28:07 crc kubenswrapper[4730]: I0930 10:28:07.435424 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/64074625-ee39-4163-afbf-bc8e220b63e7-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-mtwx8\" (UID: \"64074625-ee39-4163-afbf-bc8e220b63e7\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-mtwx8" Sep 30 10:28:07 crc kubenswrapper[4730]: I0930 10:28:07.435466 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/64074625-ee39-4163-afbf-bc8e220b63e7-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-mtwx8\" (UID: \"64074625-ee39-4163-afbf-bc8e220b63e7\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-mtwx8" Sep 30 10:28:07 crc kubenswrapper[4730]: I0930 10:28:07.444714 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pc95w\" (UniqueName: \"kubernetes.io/projected/64074625-ee39-4163-afbf-bc8e220b63e7-kube-api-access-pc95w\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-mtwx8\" (UID: \"64074625-ee39-4163-afbf-bc8e220b63e7\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-mtwx8" Sep 30 10:28:07 crc kubenswrapper[4730]: I0930 10:28:07.740478 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-mtwx8" Sep 30 10:28:08 crc kubenswrapper[4730]: I0930 10:28:08.205667 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-mtwx8"] Sep 30 10:28:08 crc kubenswrapper[4730]: W0930 10:28:08.209449 4730 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod64074625_ee39_4163_afbf_bc8e220b63e7.slice/crio-00a01dbc56474bcda649dfa41a556aa98390be2c4887abbe044a248d0d18adb5 WatchSource:0}: Error finding container 00a01dbc56474bcda649dfa41a556aa98390be2c4887abbe044a248d0d18adb5: Status 404 returned error can't find the container with id 00a01dbc56474bcda649dfa41a556aa98390be2c4887abbe044a248d0d18adb5 Sep 30 10:28:09 crc kubenswrapper[4730]: I0930 10:28:09.069727 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-mtwx8" event={"ID":"64074625-ee39-4163-afbf-bc8e220b63e7","Type":"ContainerStarted","Data":"a0c697ba8d4bc281ec1a9ca8402d6c6ca53976366bd1020646c2e6055dc1b1e8"} Sep 30 10:28:09 crc kubenswrapper[4730]: I0930 10:28:09.070001 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-mtwx8" event={"ID":"64074625-ee39-4163-afbf-bc8e220b63e7","Type":"ContainerStarted","Data":"00a01dbc56474bcda649dfa41a556aa98390be2c4887abbe044a248d0d18adb5"} Sep 30 10:28:09 crc kubenswrapper[4730]: I0930 10:28:09.090644 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-mtwx8" podStartSLOduration=1.676595206 podStartE2EDuration="2.090600708s" podCreationTimestamp="2025-09-30 10:28:07 +0000 UTC" firstStartedPulling="2025-09-30 10:28:08.211724658 +0000 UTC m=+2332.544984651" lastFinishedPulling="2025-09-30 10:28:08.62573016 +0000 UTC m=+2332.958990153" observedRunningTime="2025-09-30 10:28:09.083491592 +0000 UTC m=+2333.416751595" watchObservedRunningTime="2025-09-30 10:28:09.090600708 +0000 UTC m=+2333.423860701" Sep 30 10:28:13 crc kubenswrapper[4730]: I0930 10:28:13.381512 4730 scope.go:117] "RemoveContainer" containerID="9f6faea1f020203ed696b478f58ffd89607071ce35adf31ee047b8ef2439d347" Sep 30 10:28:13 crc kubenswrapper[4730]: E0930 10:28:13.382128 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 10:28:27 crc kubenswrapper[4730]: I0930 10:28:27.380992 4730 scope.go:117] "RemoveContainer" containerID="9f6faea1f020203ed696b478f58ffd89607071ce35adf31ee047b8ef2439d347" Sep 30 10:28:27 crc kubenswrapper[4730]: E0930 10:28:27.382932 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 10:28:38 crc kubenswrapper[4730]: I0930 
10:28:38.380816 4730 scope.go:117] "RemoveContainer" containerID="9f6faea1f020203ed696b478f58ffd89607071ce35adf31ee047b8ef2439d347" Sep 30 10:28:38 crc kubenswrapper[4730]: E0930 10:28:38.381815 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 10:28:49 crc kubenswrapper[4730]: I0930 10:28:49.398737 4730 generic.go:334] "Generic (PLEG): container finished" podID="64074625-ee39-4163-afbf-bc8e220b63e7" containerID="a0c697ba8d4bc281ec1a9ca8402d6c6ca53976366bd1020646c2e6055dc1b1e8" exitCode=0 Sep 30 10:28:49 crc kubenswrapper[4730]: I0930 10:28:49.398881 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-mtwx8" event={"ID":"64074625-ee39-4163-afbf-bc8e220b63e7","Type":"ContainerDied","Data":"a0c697ba8d4bc281ec1a9ca8402d6c6ca53976366bd1020646c2e6055dc1b1e8"} Sep 30 10:28:50 crc kubenswrapper[4730]: I0930 10:28:50.779165 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-mtwx8" Sep 30 10:28:50 crc kubenswrapper[4730]: I0930 10:28:50.861846 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/64074625-ee39-4163-afbf-bc8e220b63e7-inventory\") pod \"64074625-ee39-4163-afbf-bc8e220b63e7\" (UID: \"64074625-ee39-4163-afbf-bc8e220b63e7\") " Sep 30 10:28:50 crc kubenswrapper[4730]: I0930 10:28:50.862050 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pc95w\" (UniqueName: \"kubernetes.io/projected/64074625-ee39-4163-afbf-bc8e220b63e7-kube-api-access-pc95w\") pod \"64074625-ee39-4163-afbf-bc8e220b63e7\" (UID: \"64074625-ee39-4163-afbf-bc8e220b63e7\") " Sep 30 10:28:50 crc kubenswrapper[4730]: I0930 10:28:50.862092 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/64074625-ee39-4163-afbf-bc8e220b63e7-ssh-key\") pod \"64074625-ee39-4163-afbf-bc8e220b63e7\" (UID: \"64074625-ee39-4163-afbf-bc8e220b63e7\") " Sep 30 10:28:50 crc kubenswrapper[4730]: I0930 10:28:50.862284 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/64074625-ee39-4163-afbf-bc8e220b63e7-ceph\") pod \"64074625-ee39-4163-afbf-bc8e220b63e7\" (UID: \"64074625-ee39-4163-afbf-bc8e220b63e7\") " Sep 30 10:28:50 crc kubenswrapper[4730]: I0930 10:28:50.867350 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/64074625-ee39-4163-afbf-bc8e220b63e7-kube-api-access-pc95w" (OuterVolumeSpecName: "kube-api-access-pc95w") pod "64074625-ee39-4163-afbf-bc8e220b63e7" (UID: "64074625-ee39-4163-afbf-bc8e220b63e7"). InnerVolumeSpecName "kube-api-access-pc95w". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:28:50 crc kubenswrapper[4730]: I0930 10:28:50.868175 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/64074625-ee39-4163-afbf-bc8e220b63e7-ceph" (OuterVolumeSpecName: "ceph") pod "64074625-ee39-4163-afbf-bc8e220b63e7" (UID: "64074625-ee39-4163-afbf-bc8e220b63e7"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:28:50 crc kubenswrapper[4730]: I0930 10:28:50.889369 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/64074625-ee39-4163-afbf-bc8e220b63e7-inventory" (OuterVolumeSpecName: "inventory") pod "64074625-ee39-4163-afbf-bc8e220b63e7" (UID: "64074625-ee39-4163-afbf-bc8e220b63e7"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:28:50 crc kubenswrapper[4730]: I0930 10:28:50.890372 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/64074625-ee39-4163-afbf-bc8e220b63e7-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "64074625-ee39-4163-afbf-bc8e220b63e7" (UID: "64074625-ee39-4163-afbf-bc8e220b63e7"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:28:50 crc kubenswrapper[4730]: I0930 10:28:50.964584 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pc95w\" (UniqueName: \"kubernetes.io/projected/64074625-ee39-4163-afbf-bc8e220b63e7-kube-api-access-pc95w\") on node \"crc\" DevicePath \"\"" Sep 30 10:28:50 crc kubenswrapper[4730]: I0930 10:28:50.964632 4730 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/64074625-ee39-4163-afbf-bc8e220b63e7-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 10:28:50 crc kubenswrapper[4730]: I0930 10:28:50.964641 4730 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/64074625-ee39-4163-afbf-bc8e220b63e7-ceph\") on node \"crc\" DevicePath \"\"" Sep 30 10:28:50 crc kubenswrapper[4730]: I0930 10:28:50.964651 4730 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/64074625-ee39-4163-afbf-bc8e220b63e7-inventory\") on node \"crc\" DevicePath \"\"" Sep 30 10:28:51 crc kubenswrapper[4730]: I0930 10:28:51.380880 4730 scope.go:117] "RemoveContainer" containerID="9f6faea1f020203ed696b478f58ffd89607071ce35adf31ee047b8ef2439d347" Sep 30 10:28:51 crc kubenswrapper[4730]: E0930 10:28:51.381128 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 10:28:51 crc kubenswrapper[4730]: I0930 10:28:51.418203 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-mtwx8" event={"ID":"64074625-ee39-4163-afbf-bc8e220b63e7","Type":"ContainerDied","Data":"00a01dbc56474bcda649dfa41a556aa98390be2c4887abbe044a248d0d18adb5"} Sep 30 10:28:51 crc kubenswrapper[4730]: I0930 10:28:51.418237 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-mtwx8" Sep 30 10:28:51 crc kubenswrapper[4730]: I0930 10:28:51.418244 4730 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="00a01dbc56474bcda649dfa41a556aa98390be2c4887abbe044a248d0d18adb5" Sep 30 10:28:51 crc kubenswrapper[4730]: I0930 10:28:51.498590 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-2p45l"] Sep 30 10:28:51 crc kubenswrapper[4730]: E0930 10:28:51.499101 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64074625-ee39-4163-afbf-bc8e220b63e7" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Sep 30 10:28:51 crc kubenswrapper[4730]: I0930 10:28:51.499132 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="64074625-ee39-4163-afbf-bc8e220b63e7" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Sep 30 10:28:51 crc kubenswrapper[4730]: I0930 10:28:51.499347 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="64074625-ee39-4163-afbf-bc8e220b63e7" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Sep 30 10:28:51 crc kubenswrapper[4730]: I0930 10:28:51.500187 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-2p45l" Sep 30 10:28:51 crc kubenswrapper[4730]: I0930 10:28:51.502973 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 30 10:28:51 crc kubenswrapper[4730]: I0930 10:28:51.503130 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 30 10:28:51 crc kubenswrapper[4730]: I0930 10:28:51.503275 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-8vdlt" Sep 30 10:28:51 crc kubenswrapper[4730]: I0930 10:28:51.503459 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Sep 30 10:28:51 crc kubenswrapper[4730]: I0930 10:28:51.503580 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 10:28:51 crc kubenswrapper[4730]: I0930 10:28:51.510557 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-2p45l"] Sep 30 10:28:51 crc kubenswrapper[4730]: I0930 10:28:51.579911 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/9a25d6ab-b556-4812-b876-92f7574e6da9-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-2p45l\" (UID: \"9a25d6ab-b556-4812-b876-92f7574e6da9\") " pod="openstack/ssh-known-hosts-edpm-deployment-2p45l" Sep 30 10:28:51 crc kubenswrapper[4730]: I0930 10:28:51.580280 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/9a25d6ab-b556-4812-b876-92f7574e6da9-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-2p45l\" (UID: \"9a25d6ab-b556-4812-b876-92f7574e6da9\") " pod="openstack/ssh-known-hosts-edpm-deployment-2p45l" Sep 30 10:28:51 crc kubenswrapper[4730]: I0930 10:28:51.580338 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/9a25d6ab-b556-4812-b876-92f7574e6da9-ceph\") pod 
\"ssh-known-hosts-edpm-deployment-2p45l\" (UID: \"9a25d6ab-b556-4812-b876-92f7574e6da9\") " pod="openstack/ssh-known-hosts-edpm-deployment-2p45l" Sep 30 10:28:51 crc kubenswrapper[4730]: I0930 10:28:51.580551 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kng82\" (UniqueName: \"kubernetes.io/projected/9a25d6ab-b556-4812-b876-92f7574e6da9-kube-api-access-kng82\") pod \"ssh-known-hosts-edpm-deployment-2p45l\" (UID: \"9a25d6ab-b556-4812-b876-92f7574e6da9\") " pod="openstack/ssh-known-hosts-edpm-deployment-2p45l" Sep 30 10:28:51 crc kubenswrapper[4730]: I0930 10:28:51.682374 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kng82\" (UniqueName: \"kubernetes.io/projected/9a25d6ab-b556-4812-b876-92f7574e6da9-kube-api-access-kng82\") pod \"ssh-known-hosts-edpm-deployment-2p45l\" (UID: \"9a25d6ab-b556-4812-b876-92f7574e6da9\") " pod="openstack/ssh-known-hosts-edpm-deployment-2p45l" Sep 30 10:28:51 crc kubenswrapper[4730]: I0930 10:28:51.682789 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/9a25d6ab-b556-4812-b876-92f7574e6da9-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-2p45l\" (UID: \"9a25d6ab-b556-4812-b876-92f7574e6da9\") " pod="openstack/ssh-known-hosts-edpm-deployment-2p45l" Sep 30 10:28:51 crc kubenswrapper[4730]: I0930 10:28:51.682953 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/9a25d6ab-b556-4812-b876-92f7574e6da9-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-2p45l\" (UID: \"9a25d6ab-b556-4812-b876-92f7574e6da9\") " pod="openstack/ssh-known-hosts-edpm-deployment-2p45l" Sep 30 10:28:51 crc kubenswrapper[4730]: I0930 10:28:51.683103 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/9a25d6ab-b556-4812-b876-92f7574e6da9-ceph\") pod \"ssh-known-hosts-edpm-deployment-2p45l\" (UID: \"9a25d6ab-b556-4812-b876-92f7574e6da9\") " pod="openstack/ssh-known-hosts-edpm-deployment-2p45l" Sep 30 10:28:51 crc kubenswrapper[4730]: I0930 10:28:51.686407 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/9a25d6ab-b556-4812-b876-92f7574e6da9-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-2p45l\" (UID: \"9a25d6ab-b556-4812-b876-92f7574e6da9\") " pod="openstack/ssh-known-hosts-edpm-deployment-2p45l" Sep 30 10:28:51 crc kubenswrapper[4730]: I0930 10:28:51.686661 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/9a25d6ab-b556-4812-b876-92f7574e6da9-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-2p45l\" (UID: \"9a25d6ab-b556-4812-b876-92f7574e6da9\") " pod="openstack/ssh-known-hosts-edpm-deployment-2p45l" Sep 30 10:28:51 crc kubenswrapper[4730]: I0930 10:28:51.689696 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/9a25d6ab-b556-4812-b876-92f7574e6da9-ceph\") pod \"ssh-known-hosts-edpm-deployment-2p45l\" (UID: \"9a25d6ab-b556-4812-b876-92f7574e6da9\") " pod="openstack/ssh-known-hosts-edpm-deployment-2p45l" Sep 30 10:28:51 crc kubenswrapper[4730]: I0930 10:28:51.702558 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-kng82\" (UniqueName: \"kubernetes.io/projected/9a25d6ab-b556-4812-b876-92f7574e6da9-kube-api-access-kng82\") pod \"ssh-known-hosts-edpm-deployment-2p45l\" (UID: \"9a25d6ab-b556-4812-b876-92f7574e6da9\") " pod="openstack/ssh-known-hosts-edpm-deployment-2p45l" Sep 30 10:28:51 crc kubenswrapper[4730]: I0930 10:28:51.875645 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-2p45l" Sep 30 10:28:52 crc kubenswrapper[4730]: I0930 10:28:52.418576 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-2p45l"] Sep 30 10:28:52 crc kubenswrapper[4730]: I0930 10:28:52.433121 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-2p45l" event={"ID":"9a25d6ab-b556-4812-b876-92f7574e6da9","Type":"ContainerStarted","Data":"642ef9748fe4f99e636bb75e00984117f982b14aa4bf418f0a7b4720468c6850"} Sep 30 10:28:53 crc kubenswrapper[4730]: I0930 10:28:53.443214 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-2p45l" event={"ID":"9a25d6ab-b556-4812-b876-92f7574e6da9","Type":"ContainerStarted","Data":"132123d69dba41b4599b9d9efc1a1a4f22b35045573bc67d4eb9161c2764c0ce"} Sep 30 10:28:53 crc kubenswrapper[4730]: I0930 10:28:53.468319 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ssh-known-hosts-edpm-deployment-2p45l" podStartSLOduration=1.940334269 podStartE2EDuration="2.468299241s" podCreationTimestamp="2025-09-30 10:28:51 +0000 UTC" firstStartedPulling="2025-09-30 10:28:52.421188306 +0000 UTC m=+2376.754448299" lastFinishedPulling="2025-09-30 10:28:52.949153288 +0000 UTC m=+2377.282413271" observedRunningTime="2025-09-30 10:28:53.458862585 +0000 UTC m=+2377.792122588" watchObservedRunningTime="2025-09-30 10:28:53.468299241 +0000 UTC m=+2377.801559254" Sep 30 10:29:02 crc kubenswrapper[4730]: I0930 10:29:02.511269 4730 generic.go:334] "Generic (PLEG): container finished" podID="9a25d6ab-b556-4812-b876-92f7574e6da9" containerID="132123d69dba41b4599b9d9efc1a1a4f22b35045573bc67d4eb9161c2764c0ce" exitCode=0 Sep 30 10:29:02 crc kubenswrapper[4730]: I0930 10:29:02.511340 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-2p45l" event={"ID":"9a25d6ab-b556-4812-b876-92f7574e6da9","Type":"ContainerDied","Data":"132123d69dba41b4599b9d9efc1a1a4f22b35045573bc67d4eb9161c2764c0ce"} Sep 30 10:29:03 crc kubenswrapper[4730]: I0930 10:29:03.897730 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-2p45l" Sep 30 10:29:04 crc kubenswrapper[4730]: I0930 10:29:04.030797 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/9a25d6ab-b556-4812-b876-92f7574e6da9-inventory-0\") pod \"9a25d6ab-b556-4812-b876-92f7574e6da9\" (UID: \"9a25d6ab-b556-4812-b876-92f7574e6da9\") " Sep 30 10:29:04 crc kubenswrapper[4730]: I0930 10:29:04.030892 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/9a25d6ab-b556-4812-b876-92f7574e6da9-ceph\") pod \"9a25d6ab-b556-4812-b876-92f7574e6da9\" (UID: \"9a25d6ab-b556-4812-b876-92f7574e6da9\") " Sep 30 10:29:04 crc kubenswrapper[4730]: I0930 10:29:04.031043 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/9a25d6ab-b556-4812-b876-92f7574e6da9-ssh-key-openstack-edpm-ipam\") pod \"9a25d6ab-b556-4812-b876-92f7574e6da9\" (UID: \"9a25d6ab-b556-4812-b876-92f7574e6da9\") " Sep 30 10:29:04 crc kubenswrapper[4730]: I0930 10:29:04.031142 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kng82\" (UniqueName: \"kubernetes.io/projected/9a25d6ab-b556-4812-b876-92f7574e6da9-kube-api-access-kng82\") pod \"9a25d6ab-b556-4812-b876-92f7574e6da9\" (UID: \"9a25d6ab-b556-4812-b876-92f7574e6da9\") " Sep 30 10:29:04 crc kubenswrapper[4730]: I0930 10:29:04.037833 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9a25d6ab-b556-4812-b876-92f7574e6da9-ceph" (OuterVolumeSpecName: "ceph") pod "9a25d6ab-b556-4812-b876-92f7574e6da9" (UID: "9a25d6ab-b556-4812-b876-92f7574e6da9"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:29:04 crc kubenswrapper[4730]: I0930 10:29:04.038182 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9a25d6ab-b556-4812-b876-92f7574e6da9-kube-api-access-kng82" (OuterVolumeSpecName: "kube-api-access-kng82") pod "9a25d6ab-b556-4812-b876-92f7574e6da9" (UID: "9a25d6ab-b556-4812-b876-92f7574e6da9"). InnerVolumeSpecName "kube-api-access-kng82". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:29:04 crc kubenswrapper[4730]: I0930 10:29:04.060463 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9a25d6ab-b556-4812-b876-92f7574e6da9-inventory-0" (OuterVolumeSpecName: "inventory-0") pod "9a25d6ab-b556-4812-b876-92f7574e6da9" (UID: "9a25d6ab-b556-4812-b876-92f7574e6da9"). InnerVolumeSpecName "inventory-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:29:04 crc kubenswrapper[4730]: I0930 10:29:04.061729 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9a25d6ab-b556-4812-b876-92f7574e6da9-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "9a25d6ab-b556-4812-b876-92f7574e6da9" (UID: "9a25d6ab-b556-4812-b876-92f7574e6da9"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:29:04 crc kubenswrapper[4730]: I0930 10:29:04.133039 4730 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/9a25d6ab-b556-4812-b876-92f7574e6da9-ceph\") on node \"crc\" DevicePath \"\"" Sep 30 10:29:04 crc kubenswrapper[4730]: I0930 10:29:04.133067 4730 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/9a25d6ab-b556-4812-b876-92f7574e6da9-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Sep 30 10:29:04 crc kubenswrapper[4730]: I0930 10:29:04.133081 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kng82\" (UniqueName: \"kubernetes.io/projected/9a25d6ab-b556-4812-b876-92f7574e6da9-kube-api-access-kng82\") on node \"crc\" DevicePath \"\"" Sep 30 10:29:04 crc kubenswrapper[4730]: I0930 10:29:04.133092 4730 reconciler_common.go:293] "Volume detached for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/9a25d6ab-b556-4812-b876-92f7574e6da9-inventory-0\") on node \"crc\" DevicePath \"\"" Sep 30 10:29:04 crc kubenswrapper[4730]: I0930 10:29:04.530581 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-2p45l" event={"ID":"9a25d6ab-b556-4812-b876-92f7574e6da9","Type":"ContainerDied","Data":"642ef9748fe4f99e636bb75e00984117f982b14aa4bf418f0a7b4720468c6850"} Sep 30 10:29:04 crc kubenswrapper[4730]: I0930 10:29:04.530916 4730 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="642ef9748fe4f99e636bb75e00984117f982b14aa4bf418f0a7b4720468c6850" Sep 30 10:29:04 crc kubenswrapper[4730]: I0930 10:29:04.530681 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-2p45l" Sep 30 10:29:04 crc kubenswrapper[4730]: I0930 10:29:04.623379 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-9xt9m"] Sep 30 10:29:04 crc kubenswrapper[4730]: E0930 10:29:04.623854 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9a25d6ab-b556-4812-b876-92f7574e6da9" containerName="ssh-known-hosts-edpm-deployment" Sep 30 10:29:04 crc kubenswrapper[4730]: I0930 10:29:04.623874 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="9a25d6ab-b556-4812-b876-92f7574e6da9" containerName="ssh-known-hosts-edpm-deployment" Sep 30 10:29:04 crc kubenswrapper[4730]: I0930 10:29:04.624089 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="9a25d6ab-b556-4812-b876-92f7574e6da9" containerName="ssh-known-hosts-edpm-deployment" Sep 30 10:29:04 crc kubenswrapper[4730]: I0930 10:29:04.624857 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-9xt9m" Sep 30 10:29:04 crc kubenswrapper[4730]: I0930 10:29:04.630872 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 10:29:04 crc kubenswrapper[4730]: I0930 10:29:04.631013 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-8vdlt" Sep 30 10:29:04 crc kubenswrapper[4730]: I0930 10:29:04.631071 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 30 10:29:04 crc kubenswrapper[4730]: I0930 10:29:04.631227 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 30 10:29:04 crc kubenswrapper[4730]: I0930 10:29:04.633878 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Sep 30 10:29:04 crc kubenswrapper[4730]: I0930 10:29:04.634255 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-9xt9m"] Sep 30 10:29:04 crc kubenswrapper[4730]: I0930 10:29:04.744481 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/cda7e260-a520-4ac4-a1f4-b8e7684d2742-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-9xt9m\" (UID: \"cda7e260-a520-4ac4-a1f4-b8e7684d2742\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-9xt9m" Sep 30 10:29:04 crc kubenswrapper[4730]: I0930 10:29:04.744545 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zhzp6\" (UniqueName: \"kubernetes.io/projected/cda7e260-a520-4ac4-a1f4-b8e7684d2742-kube-api-access-zhzp6\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-9xt9m\" (UID: \"cda7e260-a520-4ac4-a1f4-b8e7684d2742\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-9xt9m" Sep 30 10:29:04 crc kubenswrapper[4730]: I0930 10:29:04.744568 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/cda7e260-a520-4ac4-a1f4-b8e7684d2742-ceph\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-9xt9m\" (UID: \"cda7e260-a520-4ac4-a1f4-b8e7684d2742\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-9xt9m" Sep 30 10:29:04 crc kubenswrapper[4730]: I0930 10:29:04.745270 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/cda7e260-a520-4ac4-a1f4-b8e7684d2742-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-9xt9m\" (UID: \"cda7e260-a520-4ac4-a1f4-b8e7684d2742\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-9xt9m" Sep 30 10:29:04 crc kubenswrapper[4730]: I0930 10:29:04.847314 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/cda7e260-a520-4ac4-a1f4-b8e7684d2742-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-9xt9m\" (UID: \"cda7e260-a520-4ac4-a1f4-b8e7684d2742\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-9xt9m" Sep 30 10:29:04 crc kubenswrapper[4730]: I0930 10:29:04.847480 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: 
\"kubernetes.io/secret/cda7e260-a520-4ac4-a1f4-b8e7684d2742-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-9xt9m\" (UID: \"cda7e260-a520-4ac4-a1f4-b8e7684d2742\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-9xt9m" Sep 30 10:29:04 crc kubenswrapper[4730]: I0930 10:29:04.847521 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zhzp6\" (UniqueName: \"kubernetes.io/projected/cda7e260-a520-4ac4-a1f4-b8e7684d2742-kube-api-access-zhzp6\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-9xt9m\" (UID: \"cda7e260-a520-4ac4-a1f4-b8e7684d2742\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-9xt9m" Sep 30 10:29:04 crc kubenswrapper[4730]: I0930 10:29:04.847542 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/cda7e260-a520-4ac4-a1f4-b8e7684d2742-ceph\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-9xt9m\" (UID: \"cda7e260-a520-4ac4-a1f4-b8e7684d2742\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-9xt9m" Sep 30 10:29:04 crc kubenswrapper[4730]: I0930 10:29:04.851935 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/cda7e260-a520-4ac4-a1f4-b8e7684d2742-ceph\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-9xt9m\" (UID: \"cda7e260-a520-4ac4-a1f4-b8e7684d2742\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-9xt9m" Sep 30 10:29:04 crc kubenswrapper[4730]: I0930 10:29:04.851957 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/cda7e260-a520-4ac4-a1f4-b8e7684d2742-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-9xt9m\" (UID: \"cda7e260-a520-4ac4-a1f4-b8e7684d2742\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-9xt9m" Sep 30 10:29:04 crc kubenswrapper[4730]: I0930 10:29:04.853803 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/cda7e260-a520-4ac4-a1f4-b8e7684d2742-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-9xt9m\" (UID: \"cda7e260-a520-4ac4-a1f4-b8e7684d2742\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-9xt9m" Sep 30 10:29:04 crc kubenswrapper[4730]: I0930 10:29:04.869698 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zhzp6\" (UniqueName: \"kubernetes.io/projected/cda7e260-a520-4ac4-a1f4-b8e7684d2742-kube-api-access-zhzp6\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-9xt9m\" (UID: \"cda7e260-a520-4ac4-a1f4-b8e7684d2742\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-9xt9m" Sep 30 10:29:04 crc kubenswrapper[4730]: I0930 10:29:04.944376 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-9xt9m" Sep 30 10:29:05 crc kubenswrapper[4730]: I0930 10:29:05.477477 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-9xt9m"] Sep 30 10:29:05 crc kubenswrapper[4730]: I0930 10:29:05.540410 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-9xt9m" event={"ID":"cda7e260-a520-4ac4-a1f4-b8e7684d2742","Type":"ContainerStarted","Data":"5393b58e96b9d15eea431a2e211ce76007fa0cb457266978b58e88c29b0f8c6d"} Sep 30 10:29:06 crc kubenswrapper[4730]: I0930 10:29:06.389510 4730 scope.go:117] "RemoveContainer" containerID="9f6faea1f020203ed696b478f58ffd89607071ce35adf31ee047b8ef2439d347" Sep 30 10:29:06 crc kubenswrapper[4730]: E0930 10:29:06.389812 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 10:29:06 crc kubenswrapper[4730]: I0930 10:29:06.550905 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-9xt9m" event={"ID":"cda7e260-a520-4ac4-a1f4-b8e7684d2742","Type":"ContainerStarted","Data":"6d4829371431b838401f7545ea73418e9e24f5c4a763bd73b1eaadcb30598edc"} Sep 30 10:29:06 crc kubenswrapper[4730]: I0930 10:29:06.573720 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-9xt9m" podStartSLOduration=2.046878394 podStartE2EDuration="2.573701267s" podCreationTimestamp="2025-09-30 10:29:04 +0000 UTC" firstStartedPulling="2025-09-30 10:29:05.483456857 +0000 UTC m=+2389.816716850" lastFinishedPulling="2025-09-30 10:29:06.01027973 +0000 UTC m=+2390.343539723" observedRunningTime="2025-09-30 10:29:06.565212146 +0000 UTC m=+2390.898472139" watchObservedRunningTime="2025-09-30 10:29:06.573701267 +0000 UTC m=+2390.906961260" Sep 30 10:29:13 crc kubenswrapper[4730]: I0930 10:29:13.611104 4730 generic.go:334] "Generic (PLEG): container finished" podID="cda7e260-a520-4ac4-a1f4-b8e7684d2742" containerID="6d4829371431b838401f7545ea73418e9e24f5c4a763bd73b1eaadcb30598edc" exitCode=0 Sep 30 10:29:13 crc kubenswrapper[4730]: I0930 10:29:13.611222 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-9xt9m" event={"ID":"cda7e260-a520-4ac4-a1f4-b8e7684d2742","Type":"ContainerDied","Data":"6d4829371431b838401f7545ea73418e9e24f5c4a763bd73b1eaadcb30598edc"} Sep 30 10:29:15 crc kubenswrapper[4730]: I0930 10:29:15.042848 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-9xt9m" Sep 30 10:29:15 crc kubenswrapper[4730]: I0930 10:29:15.166131 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/cda7e260-a520-4ac4-a1f4-b8e7684d2742-inventory\") pod \"cda7e260-a520-4ac4-a1f4-b8e7684d2742\" (UID: \"cda7e260-a520-4ac4-a1f4-b8e7684d2742\") " Sep 30 10:29:15 crc kubenswrapper[4730]: I0930 10:29:15.166195 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/cda7e260-a520-4ac4-a1f4-b8e7684d2742-ceph\") pod \"cda7e260-a520-4ac4-a1f4-b8e7684d2742\" (UID: \"cda7e260-a520-4ac4-a1f4-b8e7684d2742\") " Sep 30 10:29:15 crc kubenswrapper[4730]: I0930 10:29:15.166870 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/cda7e260-a520-4ac4-a1f4-b8e7684d2742-ssh-key\") pod \"cda7e260-a520-4ac4-a1f4-b8e7684d2742\" (UID: \"cda7e260-a520-4ac4-a1f4-b8e7684d2742\") " Sep 30 10:29:15 crc kubenswrapper[4730]: I0930 10:29:15.167146 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zhzp6\" (UniqueName: \"kubernetes.io/projected/cda7e260-a520-4ac4-a1f4-b8e7684d2742-kube-api-access-zhzp6\") pod \"cda7e260-a520-4ac4-a1f4-b8e7684d2742\" (UID: \"cda7e260-a520-4ac4-a1f4-b8e7684d2742\") " Sep 30 10:29:15 crc kubenswrapper[4730]: I0930 10:29:15.172185 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cda7e260-a520-4ac4-a1f4-b8e7684d2742-kube-api-access-zhzp6" (OuterVolumeSpecName: "kube-api-access-zhzp6") pod "cda7e260-a520-4ac4-a1f4-b8e7684d2742" (UID: "cda7e260-a520-4ac4-a1f4-b8e7684d2742"). InnerVolumeSpecName "kube-api-access-zhzp6". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:29:15 crc kubenswrapper[4730]: I0930 10:29:15.173815 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cda7e260-a520-4ac4-a1f4-b8e7684d2742-ceph" (OuterVolumeSpecName: "ceph") pod "cda7e260-a520-4ac4-a1f4-b8e7684d2742" (UID: "cda7e260-a520-4ac4-a1f4-b8e7684d2742"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:29:15 crc kubenswrapper[4730]: I0930 10:29:15.196929 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cda7e260-a520-4ac4-a1f4-b8e7684d2742-inventory" (OuterVolumeSpecName: "inventory") pod "cda7e260-a520-4ac4-a1f4-b8e7684d2742" (UID: "cda7e260-a520-4ac4-a1f4-b8e7684d2742"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:29:15 crc kubenswrapper[4730]: I0930 10:29:15.199366 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cda7e260-a520-4ac4-a1f4-b8e7684d2742-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "cda7e260-a520-4ac4-a1f4-b8e7684d2742" (UID: "cda7e260-a520-4ac4-a1f4-b8e7684d2742"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:29:15 crc kubenswrapper[4730]: I0930 10:29:15.269028 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zhzp6\" (UniqueName: \"kubernetes.io/projected/cda7e260-a520-4ac4-a1f4-b8e7684d2742-kube-api-access-zhzp6\") on node \"crc\" DevicePath \"\"" Sep 30 10:29:15 crc kubenswrapper[4730]: I0930 10:29:15.269085 4730 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/cda7e260-a520-4ac4-a1f4-b8e7684d2742-inventory\") on node \"crc\" DevicePath \"\"" Sep 30 10:29:15 crc kubenswrapper[4730]: I0930 10:29:15.269097 4730 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/cda7e260-a520-4ac4-a1f4-b8e7684d2742-ceph\") on node \"crc\" DevicePath \"\"" Sep 30 10:29:15 crc kubenswrapper[4730]: I0930 10:29:15.269108 4730 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/cda7e260-a520-4ac4-a1f4-b8e7684d2742-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 10:29:15 crc kubenswrapper[4730]: I0930 10:29:15.629372 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-9xt9m" event={"ID":"cda7e260-a520-4ac4-a1f4-b8e7684d2742","Type":"ContainerDied","Data":"5393b58e96b9d15eea431a2e211ce76007fa0cb457266978b58e88c29b0f8c6d"} Sep 30 10:29:15 crc kubenswrapper[4730]: I0930 10:29:15.629603 4730 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5393b58e96b9d15eea431a2e211ce76007fa0cb457266978b58e88c29b0f8c6d" Sep 30 10:29:15 crc kubenswrapper[4730]: I0930 10:29:15.629637 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-9xt9m" Sep 30 10:29:15 crc kubenswrapper[4730]: I0930 10:29:15.701685 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-ztvhs"] Sep 30 10:29:15 crc kubenswrapper[4730]: E0930 10:29:15.702170 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cda7e260-a520-4ac4-a1f4-b8e7684d2742" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Sep 30 10:29:15 crc kubenswrapper[4730]: I0930 10:29:15.702198 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="cda7e260-a520-4ac4-a1f4-b8e7684d2742" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Sep 30 10:29:15 crc kubenswrapper[4730]: I0930 10:29:15.702466 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="cda7e260-a520-4ac4-a1f4-b8e7684d2742" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Sep 30 10:29:15 crc kubenswrapper[4730]: I0930 10:29:15.703429 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-ztvhs" Sep 30 10:29:15 crc kubenswrapper[4730]: I0930 10:29:15.713570 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Sep 30 10:29:15 crc kubenswrapper[4730]: I0930 10:29:15.714005 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 10:29:15 crc kubenswrapper[4730]: I0930 10:29:15.714175 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 30 10:29:15 crc kubenswrapper[4730]: I0930 10:29:15.714313 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-8vdlt" Sep 30 10:29:15 crc kubenswrapper[4730]: I0930 10:29:15.716523 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 30 10:29:15 crc kubenswrapper[4730]: I0930 10:29:15.719163 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-ztvhs"] Sep 30 10:29:15 crc kubenswrapper[4730]: I0930 10:29:15.781233 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b5e99fb9-fd07-410f-8c9b-bde6849b5655-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-ztvhs\" (UID: \"b5e99fb9-fd07-410f-8c9b-bde6849b5655\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-ztvhs" Sep 30 10:29:15 crc kubenswrapper[4730]: I0930 10:29:15.781289 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b5e99fb9-fd07-410f-8c9b-bde6849b5655-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-ztvhs\" (UID: \"b5e99fb9-fd07-410f-8c9b-bde6849b5655\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-ztvhs" Sep 30 10:29:15 crc kubenswrapper[4730]: I0930 10:29:15.781411 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5b2g8\" (UniqueName: \"kubernetes.io/projected/b5e99fb9-fd07-410f-8c9b-bde6849b5655-kube-api-access-5b2g8\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-ztvhs\" (UID: \"b5e99fb9-fd07-410f-8c9b-bde6849b5655\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-ztvhs" Sep 30 10:29:15 crc kubenswrapper[4730]: I0930 10:29:15.781486 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/b5e99fb9-fd07-410f-8c9b-bde6849b5655-ceph\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-ztvhs\" (UID: \"b5e99fb9-fd07-410f-8c9b-bde6849b5655\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-ztvhs" Sep 30 10:29:15 crc kubenswrapper[4730]: I0930 10:29:15.882855 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b5e99fb9-fd07-410f-8c9b-bde6849b5655-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-ztvhs\" (UID: \"b5e99fb9-fd07-410f-8c9b-bde6849b5655\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-ztvhs" Sep 30 10:29:15 crc kubenswrapper[4730]: I0930 10:29:15.882905 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: 
\"kubernetes.io/secret/b5e99fb9-fd07-410f-8c9b-bde6849b5655-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-ztvhs\" (UID: \"b5e99fb9-fd07-410f-8c9b-bde6849b5655\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-ztvhs" Sep 30 10:29:15 crc kubenswrapper[4730]: I0930 10:29:15.882957 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5b2g8\" (UniqueName: \"kubernetes.io/projected/b5e99fb9-fd07-410f-8c9b-bde6849b5655-kube-api-access-5b2g8\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-ztvhs\" (UID: \"b5e99fb9-fd07-410f-8c9b-bde6849b5655\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-ztvhs" Sep 30 10:29:15 crc kubenswrapper[4730]: I0930 10:29:15.883004 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/b5e99fb9-fd07-410f-8c9b-bde6849b5655-ceph\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-ztvhs\" (UID: \"b5e99fb9-fd07-410f-8c9b-bde6849b5655\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-ztvhs" Sep 30 10:29:15 crc kubenswrapper[4730]: I0930 10:29:15.887196 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b5e99fb9-fd07-410f-8c9b-bde6849b5655-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-ztvhs\" (UID: \"b5e99fb9-fd07-410f-8c9b-bde6849b5655\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-ztvhs" Sep 30 10:29:15 crc kubenswrapper[4730]: I0930 10:29:15.887249 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/b5e99fb9-fd07-410f-8c9b-bde6849b5655-ceph\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-ztvhs\" (UID: \"b5e99fb9-fd07-410f-8c9b-bde6849b5655\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-ztvhs" Sep 30 10:29:15 crc kubenswrapper[4730]: I0930 10:29:15.892339 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b5e99fb9-fd07-410f-8c9b-bde6849b5655-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-ztvhs\" (UID: \"b5e99fb9-fd07-410f-8c9b-bde6849b5655\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-ztvhs" Sep 30 10:29:15 crc kubenswrapper[4730]: I0930 10:29:15.899387 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5b2g8\" (UniqueName: \"kubernetes.io/projected/b5e99fb9-fd07-410f-8c9b-bde6849b5655-kube-api-access-5b2g8\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-ztvhs\" (UID: \"b5e99fb9-fd07-410f-8c9b-bde6849b5655\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-ztvhs" Sep 30 10:29:16 crc kubenswrapper[4730]: I0930 10:29:16.024162 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-ztvhs" Sep 30 10:29:16 crc kubenswrapper[4730]: I0930 10:29:16.542751 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-ztvhs"] Sep 30 10:29:16 crc kubenswrapper[4730]: I0930 10:29:16.553871 4730 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 30 10:29:16 crc kubenswrapper[4730]: I0930 10:29:16.644300 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-ztvhs" event={"ID":"b5e99fb9-fd07-410f-8c9b-bde6849b5655","Type":"ContainerStarted","Data":"5fc28eea7b6c8f134cf4b084c1baac21c9fb4d789f1054908b02d67d92a9e731"} Sep 30 10:29:16 crc kubenswrapper[4730]: I0930 10:29:16.998193 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 10:29:17 crc kubenswrapper[4730]: I0930 10:29:17.654625 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-ztvhs" event={"ID":"b5e99fb9-fd07-410f-8c9b-bde6849b5655","Type":"ContainerStarted","Data":"b54746ccc3441b7ea5b82948d2f9ed30b18432a3debe82da36f5bbe021eb5e97"} Sep 30 10:29:17 crc kubenswrapper[4730]: I0930 10:29:17.670913 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-ztvhs" podStartSLOduration=2.229316993 podStartE2EDuration="2.670896864s" podCreationTimestamp="2025-09-30 10:29:15 +0000 UTC" firstStartedPulling="2025-09-30 10:29:16.553405335 +0000 UTC m=+2400.886665328" lastFinishedPulling="2025-09-30 10:29:16.994985206 +0000 UTC m=+2401.328245199" observedRunningTime="2025-09-30 10:29:17.66726742 +0000 UTC m=+2402.000527413" watchObservedRunningTime="2025-09-30 10:29:17.670896864 +0000 UTC m=+2402.004156857" Sep 30 10:29:19 crc kubenswrapper[4730]: I0930 10:29:19.381320 4730 scope.go:117] "RemoveContainer" containerID="9f6faea1f020203ed696b478f58ffd89607071ce35adf31ee047b8ef2439d347" Sep 30 10:29:19 crc kubenswrapper[4730]: E0930 10:29:19.382216 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 10:29:26 crc kubenswrapper[4730]: I0930 10:29:26.740042 4730 generic.go:334] "Generic (PLEG): container finished" podID="b5e99fb9-fd07-410f-8c9b-bde6849b5655" containerID="b54746ccc3441b7ea5b82948d2f9ed30b18432a3debe82da36f5bbe021eb5e97" exitCode=0 Sep 30 10:29:26 crc kubenswrapper[4730]: I0930 10:29:26.740110 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-ztvhs" event={"ID":"b5e99fb9-fd07-410f-8c9b-bde6849b5655","Type":"ContainerDied","Data":"b54746ccc3441b7ea5b82948d2f9ed30b18432a3debe82da36f5bbe021eb5e97"} Sep 30 10:29:28 crc kubenswrapper[4730]: I0930 10:29:28.146634 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-ztvhs" Sep 30 10:29:28 crc kubenswrapper[4730]: I0930 10:29:28.318469 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b5e99fb9-fd07-410f-8c9b-bde6849b5655-inventory\") pod \"b5e99fb9-fd07-410f-8c9b-bde6849b5655\" (UID: \"b5e99fb9-fd07-410f-8c9b-bde6849b5655\") " Sep 30 10:29:28 crc kubenswrapper[4730]: I0930 10:29:28.318723 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5b2g8\" (UniqueName: \"kubernetes.io/projected/b5e99fb9-fd07-410f-8c9b-bde6849b5655-kube-api-access-5b2g8\") pod \"b5e99fb9-fd07-410f-8c9b-bde6849b5655\" (UID: \"b5e99fb9-fd07-410f-8c9b-bde6849b5655\") " Sep 30 10:29:28 crc kubenswrapper[4730]: I0930 10:29:28.318966 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b5e99fb9-fd07-410f-8c9b-bde6849b5655-ssh-key\") pod \"b5e99fb9-fd07-410f-8c9b-bde6849b5655\" (UID: \"b5e99fb9-fd07-410f-8c9b-bde6849b5655\") " Sep 30 10:29:28 crc kubenswrapper[4730]: I0930 10:29:28.319585 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/b5e99fb9-fd07-410f-8c9b-bde6849b5655-ceph\") pod \"b5e99fb9-fd07-410f-8c9b-bde6849b5655\" (UID: \"b5e99fb9-fd07-410f-8c9b-bde6849b5655\") " Sep 30 10:29:28 crc kubenswrapper[4730]: I0930 10:29:28.324220 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b5e99fb9-fd07-410f-8c9b-bde6849b5655-ceph" (OuterVolumeSpecName: "ceph") pod "b5e99fb9-fd07-410f-8c9b-bde6849b5655" (UID: "b5e99fb9-fd07-410f-8c9b-bde6849b5655"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:29:28 crc kubenswrapper[4730]: I0930 10:29:28.326294 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b5e99fb9-fd07-410f-8c9b-bde6849b5655-kube-api-access-5b2g8" (OuterVolumeSpecName: "kube-api-access-5b2g8") pod "b5e99fb9-fd07-410f-8c9b-bde6849b5655" (UID: "b5e99fb9-fd07-410f-8c9b-bde6849b5655"). InnerVolumeSpecName "kube-api-access-5b2g8". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:29:28 crc kubenswrapper[4730]: I0930 10:29:28.345250 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b5e99fb9-fd07-410f-8c9b-bde6849b5655-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "b5e99fb9-fd07-410f-8c9b-bde6849b5655" (UID: "b5e99fb9-fd07-410f-8c9b-bde6849b5655"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:29:28 crc kubenswrapper[4730]: I0930 10:29:28.346296 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b5e99fb9-fd07-410f-8c9b-bde6849b5655-inventory" (OuterVolumeSpecName: "inventory") pod "b5e99fb9-fd07-410f-8c9b-bde6849b5655" (UID: "b5e99fb9-fd07-410f-8c9b-bde6849b5655"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:29:28 crc kubenswrapper[4730]: I0930 10:29:28.423093 4730 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b5e99fb9-fd07-410f-8c9b-bde6849b5655-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 10:29:28 crc kubenswrapper[4730]: I0930 10:29:28.423133 4730 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/b5e99fb9-fd07-410f-8c9b-bde6849b5655-ceph\") on node \"crc\" DevicePath \"\"" Sep 30 10:29:28 crc kubenswrapper[4730]: I0930 10:29:28.423145 4730 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b5e99fb9-fd07-410f-8c9b-bde6849b5655-inventory\") on node \"crc\" DevicePath \"\"" Sep 30 10:29:28 crc kubenswrapper[4730]: I0930 10:29:28.423156 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5b2g8\" (UniqueName: \"kubernetes.io/projected/b5e99fb9-fd07-410f-8c9b-bde6849b5655-kube-api-access-5b2g8\") on node \"crc\" DevicePath \"\"" Sep 30 10:29:28 crc kubenswrapper[4730]: I0930 10:29:28.759075 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-ztvhs" event={"ID":"b5e99fb9-fd07-410f-8c9b-bde6849b5655","Type":"ContainerDied","Data":"5fc28eea7b6c8f134cf4b084c1baac21c9fb4d789f1054908b02d67d92a9e731"} Sep 30 10:29:28 crc kubenswrapper[4730]: I0930 10:29:28.759114 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-ztvhs" Sep 30 10:29:28 crc kubenswrapper[4730]: I0930 10:29:28.759118 4730 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5fc28eea7b6c8f134cf4b084c1baac21c9fb4d789f1054908b02d67d92a9e731" Sep 30 10:29:28 crc kubenswrapper[4730]: I0930 10:29:28.833560 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-47s8l"] Sep 30 10:29:28 crc kubenswrapper[4730]: E0930 10:29:28.834168 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b5e99fb9-fd07-410f-8c9b-bde6849b5655" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Sep 30 10:29:28 crc kubenswrapper[4730]: I0930 10:29:28.834197 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="b5e99fb9-fd07-410f-8c9b-bde6849b5655" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Sep 30 10:29:28 crc kubenswrapper[4730]: I0930 10:29:28.834386 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="b5e99fb9-fd07-410f-8c9b-bde6849b5655" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Sep 30 10:29:28 crc kubenswrapper[4730]: I0930 10:29:28.835136 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-47s8l" Sep 30 10:29:28 crc kubenswrapper[4730]: I0930 10:29:28.838926 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-ovn-default-certs-0" Sep 30 10:29:28 crc kubenswrapper[4730]: I0930 10:29:28.839100 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Sep 30 10:29:28 crc kubenswrapper[4730]: I0930 10:29:28.839156 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 30 10:29:28 crc kubenswrapper[4730]: I0930 10:29:28.840502 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-8vdlt" Sep 30 10:29:28 crc kubenswrapper[4730]: I0930 10:29:28.840568 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-libvirt-default-certs-0" Sep 30 10:29:28 crc kubenswrapper[4730]: I0930 10:29:28.840811 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-telemetry-default-certs-0" Sep 30 10:29:28 crc kubenswrapper[4730]: I0930 10:29:28.840819 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-neutron-metadata-default-certs-0" Sep 30 10:29:28 crc kubenswrapper[4730]: I0930 10:29:28.843326 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 10:29:28 crc kubenswrapper[4730]: I0930 10:29:28.845791 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-47s8l"] Sep 30 10:29:28 crc kubenswrapper[4730]: I0930 10:29:28.846102 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 30 10:29:28 crc kubenswrapper[4730]: I0930 10:29:28.932289 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c191d318-9d8c-4aac-bbc9-371553bb29bf-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-47s8l\" (UID: \"c191d318-9d8c-4aac-bbc9-371553bb29bf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-47s8l" Sep 30 10:29:28 crc kubenswrapper[4730]: I0930 10:29:28.932337 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/c191d318-9d8c-4aac-bbc9-371553bb29bf-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-47s8l\" (UID: \"c191d318-9d8c-4aac-bbc9-371553bb29bf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-47s8l" Sep 30 10:29:28 crc kubenswrapper[4730]: I0930 10:29:28.932525 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/c191d318-9d8c-4aac-bbc9-371553bb29bf-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-47s8l\" (UID: \"c191d318-9d8c-4aac-bbc9-371553bb29bf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-47s8l" Sep 30 10:29:28 crc kubenswrapper[4730]: I0930 10:29:28.932589 4730 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c191d318-9d8c-4aac-bbc9-371553bb29bf-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-47s8l\" (UID: \"c191d318-9d8c-4aac-bbc9-371553bb29bf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-47s8l" Sep 30 10:29:28 crc kubenswrapper[4730]: I0930 10:29:28.932646 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c191d318-9d8c-4aac-bbc9-371553bb29bf-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-47s8l\" (UID: \"c191d318-9d8c-4aac-bbc9-371553bb29bf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-47s8l" Sep 30 10:29:28 crc kubenswrapper[4730]: I0930 10:29:28.932680 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/c191d318-9d8c-4aac-bbc9-371553bb29bf-ceph\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-47s8l\" (UID: \"c191d318-9d8c-4aac-bbc9-371553bb29bf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-47s8l" Sep 30 10:29:28 crc kubenswrapper[4730]: I0930 10:29:28.932718 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lvhdh\" (UniqueName: \"kubernetes.io/projected/c191d318-9d8c-4aac-bbc9-371553bb29bf-kube-api-access-lvhdh\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-47s8l\" (UID: \"c191d318-9d8c-4aac-bbc9-371553bb29bf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-47s8l" Sep 30 10:29:28 crc kubenswrapper[4730]: I0930 10:29:28.932771 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c191d318-9d8c-4aac-bbc9-371553bb29bf-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-47s8l\" (UID: \"c191d318-9d8c-4aac-bbc9-371553bb29bf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-47s8l" Sep 30 10:29:28 crc kubenswrapper[4730]: I0930 10:29:28.932899 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c191d318-9d8c-4aac-bbc9-371553bb29bf-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-47s8l\" (UID: \"c191d318-9d8c-4aac-bbc9-371553bb29bf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-47s8l" Sep 30 10:29:28 crc kubenswrapper[4730]: I0930 10:29:28.933082 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c191d318-9d8c-4aac-bbc9-371553bb29bf-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-47s8l\" (UID: \"c191d318-9d8c-4aac-bbc9-371553bb29bf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-47s8l" Sep 30 10:29:28 crc kubenswrapper[4730]: I0930 10:29:28.933120 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c191d318-9d8c-4aac-bbc9-371553bb29bf-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-47s8l\" (UID: 
\"c191d318-9d8c-4aac-bbc9-371553bb29bf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-47s8l" Sep 30 10:29:28 crc kubenswrapper[4730]: I0930 10:29:28.933177 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c191d318-9d8c-4aac-bbc9-371553bb29bf-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-47s8l\" (UID: \"c191d318-9d8c-4aac-bbc9-371553bb29bf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-47s8l" Sep 30 10:29:28 crc kubenswrapper[4730]: I0930 10:29:28.933268 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c191d318-9d8c-4aac-bbc9-371553bb29bf-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-47s8l\" (UID: \"c191d318-9d8c-4aac-bbc9-371553bb29bf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-47s8l" Sep 30 10:29:28 crc kubenswrapper[4730]: I0930 10:29:28.933355 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/c191d318-9d8c-4aac-bbc9-371553bb29bf-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-47s8l\" (UID: \"c191d318-9d8c-4aac-bbc9-371553bb29bf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-47s8l" Sep 30 10:29:28 crc kubenswrapper[4730]: I0930 10:29:28.933410 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/c191d318-9d8c-4aac-bbc9-371553bb29bf-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-47s8l\" (UID: \"c191d318-9d8c-4aac-bbc9-371553bb29bf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-47s8l" Sep 30 10:29:29 crc kubenswrapper[4730]: I0930 10:29:29.035133 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c191d318-9d8c-4aac-bbc9-371553bb29bf-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-47s8l\" (UID: \"c191d318-9d8c-4aac-bbc9-371553bb29bf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-47s8l" Sep 30 10:29:29 crc kubenswrapper[4730]: I0930 10:29:29.035460 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c191d318-9d8c-4aac-bbc9-371553bb29bf-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-47s8l\" (UID: \"c191d318-9d8c-4aac-bbc9-371553bb29bf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-47s8l" Sep 30 10:29:29 crc kubenswrapper[4730]: I0930 10:29:29.035565 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/c191d318-9d8c-4aac-bbc9-371553bb29bf-ceph\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-47s8l\" (UID: \"c191d318-9d8c-4aac-bbc9-371553bb29bf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-47s8l" Sep 30 10:29:29 crc kubenswrapper[4730]: I0930 10:29:29.035721 4730 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lvhdh\" (UniqueName: \"kubernetes.io/projected/c191d318-9d8c-4aac-bbc9-371553bb29bf-kube-api-access-lvhdh\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-47s8l\" (UID: \"c191d318-9d8c-4aac-bbc9-371553bb29bf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-47s8l" Sep 30 10:29:29 crc kubenswrapper[4730]: I0930 10:29:29.035840 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c191d318-9d8c-4aac-bbc9-371553bb29bf-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-47s8l\" (UID: \"c191d318-9d8c-4aac-bbc9-371553bb29bf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-47s8l" Sep 30 10:29:29 crc kubenswrapper[4730]: I0930 10:29:29.035953 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c191d318-9d8c-4aac-bbc9-371553bb29bf-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-47s8l\" (UID: \"c191d318-9d8c-4aac-bbc9-371553bb29bf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-47s8l" Sep 30 10:29:29 crc kubenswrapper[4730]: I0930 10:29:29.036048 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c191d318-9d8c-4aac-bbc9-371553bb29bf-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-47s8l\" (UID: \"c191d318-9d8c-4aac-bbc9-371553bb29bf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-47s8l" Sep 30 10:29:29 crc kubenswrapper[4730]: I0930 10:29:29.036135 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c191d318-9d8c-4aac-bbc9-371553bb29bf-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-47s8l\" (UID: \"c191d318-9d8c-4aac-bbc9-371553bb29bf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-47s8l" Sep 30 10:29:29 crc kubenswrapper[4730]: I0930 10:29:29.036229 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c191d318-9d8c-4aac-bbc9-371553bb29bf-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-47s8l\" (UID: \"c191d318-9d8c-4aac-bbc9-371553bb29bf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-47s8l" Sep 30 10:29:29 crc kubenswrapper[4730]: I0930 10:29:29.036343 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c191d318-9d8c-4aac-bbc9-371553bb29bf-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-47s8l\" (UID: \"c191d318-9d8c-4aac-bbc9-371553bb29bf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-47s8l" Sep 30 10:29:29 crc kubenswrapper[4730]: I0930 10:29:29.036466 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/c191d318-9d8c-4aac-bbc9-371553bb29bf-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod 
\"install-certs-edpm-deployment-openstack-edpm-ipam-47s8l\" (UID: \"c191d318-9d8c-4aac-bbc9-371553bb29bf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-47s8l" Sep 30 10:29:29 crc kubenswrapper[4730]: I0930 10:29:29.036599 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/c191d318-9d8c-4aac-bbc9-371553bb29bf-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-47s8l\" (UID: \"c191d318-9d8c-4aac-bbc9-371553bb29bf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-47s8l" Sep 30 10:29:29 crc kubenswrapper[4730]: I0930 10:29:29.036780 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c191d318-9d8c-4aac-bbc9-371553bb29bf-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-47s8l\" (UID: \"c191d318-9d8c-4aac-bbc9-371553bb29bf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-47s8l" Sep 30 10:29:29 crc kubenswrapper[4730]: I0930 10:29:29.036898 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/c191d318-9d8c-4aac-bbc9-371553bb29bf-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-47s8l\" (UID: \"c191d318-9d8c-4aac-bbc9-371553bb29bf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-47s8l" Sep 30 10:29:29 crc kubenswrapper[4730]: I0930 10:29:29.037052 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/c191d318-9d8c-4aac-bbc9-371553bb29bf-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-47s8l\" (UID: \"c191d318-9d8c-4aac-bbc9-371553bb29bf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-47s8l" Sep 30 10:29:29 crc kubenswrapper[4730]: I0930 10:29:29.040863 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/c191d318-9d8c-4aac-bbc9-371553bb29bf-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-47s8l\" (UID: \"c191d318-9d8c-4aac-bbc9-371553bb29bf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-47s8l" Sep 30 10:29:29 crc kubenswrapper[4730]: I0930 10:29:29.041581 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c191d318-9d8c-4aac-bbc9-371553bb29bf-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-47s8l\" (UID: \"c191d318-9d8c-4aac-bbc9-371553bb29bf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-47s8l" Sep 30 10:29:29 crc kubenswrapper[4730]: I0930 10:29:29.042473 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c191d318-9d8c-4aac-bbc9-371553bb29bf-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-47s8l\" (UID: \"c191d318-9d8c-4aac-bbc9-371553bb29bf\") " 
pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-47s8l" Sep 30 10:29:29 crc kubenswrapper[4730]: I0930 10:29:29.042777 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c191d318-9d8c-4aac-bbc9-371553bb29bf-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-47s8l\" (UID: \"c191d318-9d8c-4aac-bbc9-371553bb29bf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-47s8l" Sep 30 10:29:29 crc kubenswrapper[4730]: I0930 10:29:29.042933 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/c191d318-9d8c-4aac-bbc9-371553bb29bf-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-47s8l\" (UID: \"c191d318-9d8c-4aac-bbc9-371553bb29bf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-47s8l" Sep 30 10:29:29 crc kubenswrapper[4730]: I0930 10:29:29.043058 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/c191d318-9d8c-4aac-bbc9-371553bb29bf-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-47s8l\" (UID: \"c191d318-9d8c-4aac-bbc9-371553bb29bf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-47s8l" Sep 30 10:29:29 crc kubenswrapper[4730]: I0930 10:29:29.043688 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c191d318-9d8c-4aac-bbc9-371553bb29bf-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-47s8l\" (UID: \"c191d318-9d8c-4aac-bbc9-371553bb29bf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-47s8l" Sep 30 10:29:29 crc kubenswrapper[4730]: I0930 10:29:29.043707 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c191d318-9d8c-4aac-bbc9-371553bb29bf-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-47s8l\" (UID: \"c191d318-9d8c-4aac-bbc9-371553bb29bf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-47s8l" Sep 30 10:29:29 crc kubenswrapper[4730]: I0930 10:29:29.044063 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c191d318-9d8c-4aac-bbc9-371553bb29bf-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-47s8l\" (UID: \"c191d318-9d8c-4aac-bbc9-371553bb29bf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-47s8l" Sep 30 10:29:29 crc kubenswrapper[4730]: I0930 10:29:29.044567 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/c191d318-9d8c-4aac-bbc9-371553bb29bf-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-47s8l\" (UID: \"c191d318-9d8c-4aac-bbc9-371553bb29bf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-47s8l" Sep 30 10:29:29 crc kubenswrapper[4730]: I0930 10:29:29.044876 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/c191d318-9d8c-4aac-bbc9-371553bb29bf-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-47s8l\" (UID: \"c191d318-9d8c-4aac-bbc9-371553bb29bf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-47s8l" Sep 30 10:29:29 crc kubenswrapper[4730]: I0930 10:29:29.044899 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/c191d318-9d8c-4aac-bbc9-371553bb29bf-ceph\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-47s8l\" (UID: \"c191d318-9d8c-4aac-bbc9-371553bb29bf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-47s8l" Sep 30 10:29:29 crc kubenswrapper[4730]: I0930 10:29:29.045910 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c191d318-9d8c-4aac-bbc9-371553bb29bf-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-47s8l\" (UID: \"c191d318-9d8c-4aac-bbc9-371553bb29bf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-47s8l" Sep 30 10:29:29 crc kubenswrapper[4730]: I0930 10:29:29.046441 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c191d318-9d8c-4aac-bbc9-371553bb29bf-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-47s8l\" (UID: \"c191d318-9d8c-4aac-bbc9-371553bb29bf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-47s8l" Sep 30 10:29:29 crc kubenswrapper[4730]: I0930 10:29:29.053765 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lvhdh\" (UniqueName: \"kubernetes.io/projected/c191d318-9d8c-4aac-bbc9-371553bb29bf-kube-api-access-lvhdh\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-47s8l\" (UID: \"c191d318-9d8c-4aac-bbc9-371553bb29bf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-47s8l" Sep 30 10:29:29 crc kubenswrapper[4730]: I0930 10:29:29.154115 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-47s8l" Sep 30 10:29:29 crc kubenswrapper[4730]: I0930 10:29:29.705791 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-47s8l"] Sep 30 10:29:29 crc kubenswrapper[4730]: I0930 10:29:29.767505 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-47s8l" event={"ID":"c191d318-9d8c-4aac-bbc9-371553bb29bf","Type":"ContainerStarted","Data":"0b10bc911a750479d9f8ce38e6879eec6c07f9f2a708c3af201a66764e8571e4"} Sep 30 10:29:31 crc kubenswrapper[4730]: I0930 10:29:31.787674 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-47s8l" event={"ID":"c191d318-9d8c-4aac-bbc9-371553bb29bf","Type":"ContainerStarted","Data":"d98a05146e0813271f713083b66d38f1cd3b7ef08573c23d45f2084c011f19e4"} Sep 30 10:29:31 crc kubenswrapper[4730]: I0930 10:29:31.812044 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-47s8l" podStartSLOduration=2.928937329 podStartE2EDuration="3.812022439s" podCreationTimestamp="2025-09-30 10:29:28 +0000 UTC" firstStartedPulling="2025-09-30 10:29:29.709358899 +0000 UTC m=+2414.042618892" lastFinishedPulling="2025-09-30 10:29:30.592444009 +0000 UTC m=+2414.925704002" observedRunningTime="2025-09-30 10:29:31.80591675 +0000 UTC m=+2416.139176743" watchObservedRunningTime="2025-09-30 10:29:31.812022439 +0000 UTC m=+2416.145282432" Sep 30 10:29:33 crc kubenswrapper[4730]: I0930 10:29:33.380524 4730 scope.go:117] "RemoveContainer" containerID="9f6faea1f020203ed696b478f58ffd89607071ce35adf31ee047b8ef2439d347" Sep 30 10:29:33 crc kubenswrapper[4730]: E0930 10:29:33.380815 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 10:29:44 crc kubenswrapper[4730]: I0930 10:29:44.382408 4730 scope.go:117] "RemoveContainer" containerID="9f6faea1f020203ed696b478f58ffd89607071ce35adf31ee047b8ef2439d347" Sep 30 10:29:44 crc kubenswrapper[4730]: E0930 10:29:44.383063 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 10:29:55 crc kubenswrapper[4730]: I0930 10:29:55.381137 4730 scope.go:117] "RemoveContainer" containerID="9f6faea1f020203ed696b478f58ffd89607071ce35adf31ee047b8ef2439d347" Sep 30 10:29:55 crc kubenswrapper[4730]: E0930 10:29:55.383150 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 10:30:00 crc kubenswrapper[4730]: I0930 10:30:00.177365 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29320470-zzt2k"] Sep 30 10:30:00 crc kubenswrapper[4730]: I0930 10:30:00.182484 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29320470-zzt2k" Sep 30 10:30:00 crc kubenswrapper[4730]: I0930 10:30:00.186308 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Sep 30 10:30:00 crc kubenswrapper[4730]: I0930 10:30:00.186576 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Sep 30 10:30:00 crc kubenswrapper[4730]: I0930 10:30:00.199849 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29320470-zzt2k"] Sep 30 10:30:00 crc kubenswrapper[4730]: I0930 10:30:00.346861 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mxxrt\" (UniqueName: \"kubernetes.io/projected/d920f51a-11f4-408a-a55c-2a3ee16173da-kube-api-access-mxxrt\") pod \"collect-profiles-29320470-zzt2k\" (UID: \"d920f51a-11f4-408a-a55c-2a3ee16173da\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320470-zzt2k" Sep 30 10:30:00 crc kubenswrapper[4730]: I0930 10:30:00.347204 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d920f51a-11f4-408a-a55c-2a3ee16173da-secret-volume\") pod \"collect-profiles-29320470-zzt2k\" (UID: \"d920f51a-11f4-408a-a55c-2a3ee16173da\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320470-zzt2k" Sep 30 10:30:00 crc kubenswrapper[4730]: I0930 10:30:00.347418 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d920f51a-11f4-408a-a55c-2a3ee16173da-config-volume\") pod \"collect-profiles-29320470-zzt2k\" (UID: \"d920f51a-11f4-408a-a55c-2a3ee16173da\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320470-zzt2k" Sep 30 10:30:00 crc kubenswrapper[4730]: I0930 10:30:00.450479 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mxxrt\" (UniqueName: \"kubernetes.io/projected/d920f51a-11f4-408a-a55c-2a3ee16173da-kube-api-access-mxxrt\") pod \"collect-profiles-29320470-zzt2k\" (UID: \"d920f51a-11f4-408a-a55c-2a3ee16173da\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320470-zzt2k" Sep 30 10:30:00 crc kubenswrapper[4730]: I0930 10:30:00.450560 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d920f51a-11f4-408a-a55c-2a3ee16173da-secret-volume\") pod \"collect-profiles-29320470-zzt2k\" (UID: \"d920f51a-11f4-408a-a55c-2a3ee16173da\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320470-zzt2k" Sep 30 10:30:00 crc kubenswrapper[4730]: I0930 10:30:00.451163 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d920f51a-11f4-408a-a55c-2a3ee16173da-config-volume\") 
pod \"collect-profiles-29320470-zzt2k\" (UID: \"d920f51a-11f4-408a-a55c-2a3ee16173da\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320470-zzt2k" Sep 30 10:30:00 crc kubenswrapper[4730]: I0930 10:30:00.452728 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d920f51a-11f4-408a-a55c-2a3ee16173da-config-volume\") pod \"collect-profiles-29320470-zzt2k\" (UID: \"d920f51a-11f4-408a-a55c-2a3ee16173da\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320470-zzt2k" Sep 30 10:30:00 crc kubenswrapper[4730]: I0930 10:30:00.456856 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d920f51a-11f4-408a-a55c-2a3ee16173da-secret-volume\") pod \"collect-profiles-29320470-zzt2k\" (UID: \"d920f51a-11f4-408a-a55c-2a3ee16173da\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320470-zzt2k" Sep 30 10:30:00 crc kubenswrapper[4730]: I0930 10:30:00.470976 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mxxrt\" (UniqueName: \"kubernetes.io/projected/d920f51a-11f4-408a-a55c-2a3ee16173da-kube-api-access-mxxrt\") pod \"collect-profiles-29320470-zzt2k\" (UID: \"d920f51a-11f4-408a-a55c-2a3ee16173da\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320470-zzt2k" Sep 30 10:30:00 crc kubenswrapper[4730]: I0930 10:30:00.523570 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29320470-zzt2k" Sep 30 10:30:01 crc kubenswrapper[4730]: I0930 10:30:01.007459 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29320470-zzt2k"] Sep 30 10:30:01 crc kubenswrapper[4730]: I0930 10:30:01.054436 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29320470-zzt2k" event={"ID":"d920f51a-11f4-408a-a55c-2a3ee16173da","Type":"ContainerStarted","Data":"7efd3aff92136ed7981f2150e989dd4ee5a60b63ed2cd9ced21bf481949d2e90"} Sep 30 10:30:02 crc kubenswrapper[4730]: I0930 10:30:02.065214 4730 generic.go:334] "Generic (PLEG): container finished" podID="d920f51a-11f4-408a-a55c-2a3ee16173da" containerID="fa43702e2616338e29537c06c8d80c2120be826d7694836cfe65017edb9c8735" exitCode=0 Sep 30 10:30:02 crc kubenswrapper[4730]: I0930 10:30:02.065317 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29320470-zzt2k" event={"ID":"d920f51a-11f4-408a-a55c-2a3ee16173da","Type":"ContainerDied","Data":"fa43702e2616338e29537c06c8d80c2120be826d7694836cfe65017edb9c8735"} Sep 30 10:30:03 crc kubenswrapper[4730]: I0930 10:30:03.439315 4730 util.go:48] "No ready sandbox for pod can be found. 
Sep 30 10:30:03 crc kubenswrapper[4730]: I0930 10:30:03.536670 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d920f51a-11f4-408a-a55c-2a3ee16173da-secret-volume\") pod \"d920f51a-11f4-408a-a55c-2a3ee16173da\" (UID: \"d920f51a-11f4-408a-a55c-2a3ee16173da\") "
Sep 30 10:30:03 crc kubenswrapper[4730]: I0930 10:30:03.536849 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d920f51a-11f4-408a-a55c-2a3ee16173da-config-volume\") pod \"d920f51a-11f4-408a-a55c-2a3ee16173da\" (UID: \"d920f51a-11f4-408a-a55c-2a3ee16173da\") "
Sep 30 10:30:03 crc kubenswrapper[4730]: I0930 10:30:03.536919 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mxxrt\" (UniqueName: \"kubernetes.io/projected/d920f51a-11f4-408a-a55c-2a3ee16173da-kube-api-access-mxxrt\") pod \"d920f51a-11f4-408a-a55c-2a3ee16173da\" (UID: \"d920f51a-11f4-408a-a55c-2a3ee16173da\") "
Sep 30 10:30:03 crc kubenswrapper[4730]: I0930 10:30:03.537928 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d920f51a-11f4-408a-a55c-2a3ee16173da-config-volume" (OuterVolumeSpecName: "config-volume") pod "d920f51a-11f4-408a-a55c-2a3ee16173da" (UID: "d920f51a-11f4-408a-a55c-2a3ee16173da"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 10:30:03 crc kubenswrapper[4730]: I0930 10:30:03.544646 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d920f51a-11f4-408a-a55c-2a3ee16173da-kube-api-access-mxxrt" (OuterVolumeSpecName: "kube-api-access-mxxrt") pod "d920f51a-11f4-408a-a55c-2a3ee16173da" (UID: "d920f51a-11f4-408a-a55c-2a3ee16173da"). InnerVolumeSpecName "kube-api-access-mxxrt". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 10:30:03 crc kubenswrapper[4730]: I0930 10:30:03.544943 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d920f51a-11f4-408a-a55c-2a3ee16173da-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "d920f51a-11f4-408a-a55c-2a3ee16173da" (UID: "d920f51a-11f4-408a-a55c-2a3ee16173da"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 10:30:03 crc kubenswrapper[4730]: I0930 10:30:03.639415 4730 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d920f51a-11f4-408a-a55c-2a3ee16173da-secret-volume\") on node \"crc\" DevicePath \"\""
Sep 30 10:30:03 crc kubenswrapper[4730]: I0930 10:30:03.639453 4730 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d920f51a-11f4-408a-a55c-2a3ee16173da-config-volume\") on node \"crc\" DevicePath \"\""
Sep 30 10:30:03 crc kubenswrapper[4730]: I0930 10:30:03.639501 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mxxrt\" (UniqueName: \"kubernetes.io/projected/d920f51a-11f4-408a-a55c-2a3ee16173da-kube-api-access-mxxrt\") on node \"crc\" DevicePath \"\""
Sep 30 10:30:04 crc kubenswrapper[4730]: I0930 10:30:04.085911 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29320470-zzt2k" event={"ID":"d920f51a-11f4-408a-a55c-2a3ee16173da","Type":"ContainerDied","Data":"7efd3aff92136ed7981f2150e989dd4ee5a60b63ed2cd9ced21bf481949d2e90"}
Sep 30 10:30:04 crc kubenswrapper[4730]: I0930 10:30:04.085955 4730 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7efd3aff92136ed7981f2150e989dd4ee5a60b63ed2cd9ced21bf481949d2e90"
Sep 30 10:30:04 crc kubenswrapper[4730]: I0930 10:30:04.086036 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29320470-zzt2k"
Sep 30 10:30:04 crc kubenswrapper[4730]: I0930 10:30:04.524709 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29320425-gwwdz"]
Sep 30 10:30:04 crc kubenswrapper[4730]: I0930 10:30:04.534032 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29320425-gwwdz"]
Sep 30 10:30:06 crc kubenswrapper[4730]: I0930 10:30:06.392490 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="47a443f7-7d61-447b-b119-29dcd51b1b18" path="/var/lib/kubelet/pods/47a443f7-7d61-447b-b119-29dcd51b1b18/volumes"
Sep 30 10:30:09 crc kubenswrapper[4730]: I0930 10:30:09.129066 4730 generic.go:334] "Generic (PLEG): container finished" podID="c191d318-9d8c-4aac-bbc9-371553bb29bf" containerID="d98a05146e0813271f713083b66d38f1cd3b7ef08573c23d45f2084c011f19e4" exitCode=0
Sep 30 10:30:09 crc kubenswrapper[4730]: I0930 10:30:09.129159 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-47s8l" event={"ID":"c191d318-9d8c-4aac-bbc9-371553bb29bf","Type":"ContainerDied","Data":"d98a05146e0813271f713083b66d38f1cd3b7ef08573c23d45f2084c011f19e4"}
Sep 30 10:30:09 crc kubenswrapper[4730]: I0930 10:30:09.381193 4730 scope.go:117] "RemoveContainer" containerID="9f6faea1f020203ed696b478f58ffd89607071ce35adf31ee047b8ef2439d347"
Sep 30 10:30:09 crc kubenswrapper[4730]: E0930 10:30:09.381477 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327"
Sep 30 10:30:10 crc kubenswrapper[4730]: I0930 10:30:10.553409 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-47s8l"
Sep 30 10:30:10 crc kubenswrapper[4730]: I0930 10:30:10.681476 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/c191d318-9d8c-4aac-bbc9-371553bb29bf-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"c191d318-9d8c-4aac-bbc9-371553bb29bf\" (UID: \"c191d318-9d8c-4aac-bbc9-371553bb29bf\") "
Sep 30 10:30:10 crc kubenswrapper[4730]: I0930 10:30:10.681595 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c191d318-9d8c-4aac-bbc9-371553bb29bf-ssh-key\") pod \"c191d318-9d8c-4aac-bbc9-371553bb29bf\" (UID: \"c191d318-9d8c-4aac-bbc9-371553bb29bf\") "
Sep 30 10:30:10 crc kubenswrapper[4730]: I0930 10:30:10.681648 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/c191d318-9d8c-4aac-bbc9-371553bb29bf-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"c191d318-9d8c-4aac-bbc9-371553bb29bf\" (UID: \"c191d318-9d8c-4aac-bbc9-371553bb29bf\") "
Sep 30 10:30:10 crc kubenswrapper[4730]: I0930 10:30:10.681674 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c191d318-9d8c-4aac-bbc9-371553bb29bf-nova-combined-ca-bundle\") pod \"c191d318-9d8c-4aac-bbc9-371553bb29bf\" (UID: \"c191d318-9d8c-4aac-bbc9-371553bb29bf\") "
Sep 30 10:30:10 crc kubenswrapper[4730]: I0930 10:30:10.681736 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c191d318-9d8c-4aac-bbc9-371553bb29bf-repo-setup-combined-ca-bundle\") pod \"c191d318-9d8c-4aac-bbc9-371553bb29bf\" (UID: \"c191d318-9d8c-4aac-bbc9-371553bb29bf\") "
Sep 30 10:30:10 crc kubenswrapper[4730]: I0930 10:30:10.681755 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c191d318-9d8c-4aac-bbc9-371553bb29bf-libvirt-combined-ca-bundle\") pod \"c191d318-9d8c-4aac-bbc9-371553bb29bf\" (UID: \"c191d318-9d8c-4aac-bbc9-371553bb29bf\") "
Sep 30 10:30:10 crc kubenswrapper[4730]: I0930 10:30:10.681782 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c191d318-9d8c-4aac-bbc9-371553bb29bf-ovn-combined-ca-bundle\") pod \"c191d318-9d8c-4aac-bbc9-371553bb29bf\" (UID: \"c191d318-9d8c-4aac-bbc9-371553bb29bf\") "
Sep 30 10:30:10 crc kubenswrapper[4730]: I0930 10:30:10.681830 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c191d318-9d8c-4aac-bbc9-371553bb29bf-telemetry-combined-ca-bundle\") pod \"c191d318-9d8c-4aac-bbc9-371553bb29bf\" (UID: \"c191d318-9d8c-4aac-bbc9-371553bb29bf\") "
Sep 30 10:30:10 crc kubenswrapper[4730]: I0930 10:30:10.681910 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c191d318-9d8c-4aac-bbc9-371553bb29bf-bootstrap-combined-ca-bundle\") pod \"c191d318-9d8c-4aac-bbc9-371553bb29bf\" (UID: \"c191d318-9d8c-4aac-bbc9-371553bb29bf\") "
Sep 30 10:30:10 crc kubenswrapper[4730]: I0930 10:30:10.681940 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/c191d318-9d8c-4aac-bbc9-371553bb29bf-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"c191d318-9d8c-4aac-bbc9-371553bb29bf\" (UID: \"c191d318-9d8c-4aac-bbc9-371553bb29bf\") "
Sep 30 10:30:10 crc kubenswrapper[4730]: I0930 10:30:10.681957 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/c191d318-9d8c-4aac-bbc9-371553bb29bf-ceph\") pod \"c191d318-9d8c-4aac-bbc9-371553bb29bf\" (UID: \"c191d318-9d8c-4aac-bbc9-371553bb29bf\") "
Sep 30 10:30:10 crc kubenswrapper[4730]: I0930 10:30:10.681982 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/c191d318-9d8c-4aac-bbc9-371553bb29bf-openstack-edpm-ipam-ovn-default-certs-0\") pod \"c191d318-9d8c-4aac-bbc9-371553bb29bf\" (UID: \"c191d318-9d8c-4aac-bbc9-371553bb29bf\") "
Sep 30 10:30:10 crc kubenswrapper[4730]: I0930 10:30:10.682000 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c191d318-9d8c-4aac-bbc9-371553bb29bf-inventory\") pod \"c191d318-9d8c-4aac-bbc9-371553bb29bf\" (UID: \"c191d318-9d8c-4aac-bbc9-371553bb29bf\") "
Sep 30 10:30:10 crc kubenswrapper[4730]: I0930 10:30:10.682032 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lvhdh\" (UniqueName: \"kubernetes.io/projected/c191d318-9d8c-4aac-bbc9-371553bb29bf-kube-api-access-lvhdh\") pod \"c191d318-9d8c-4aac-bbc9-371553bb29bf\" (UID: \"c191d318-9d8c-4aac-bbc9-371553bb29bf\") "
Sep 30 10:30:10 crc kubenswrapper[4730]: I0930 10:30:10.682072 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c191d318-9d8c-4aac-bbc9-371553bb29bf-neutron-metadata-combined-ca-bundle\") pod \"c191d318-9d8c-4aac-bbc9-371553bb29bf\" (UID: \"c191d318-9d8c-4aac-bbc9-371553bb29bf\") "
Sep 30 10:30:10 crc kubenswrapper[4730]: I0930 10:30:10.687886 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c191d318-9d8c-4aac-bbc9-371553bb29bf-openstack-edpm-ipam-telemetry-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-telemetry-default-certs-0") pod "c191d318-9d8c-4aac-bbc9-371553bb29bf" (UID: "c191d318-9d8c-4aac-bbc9-371553bb29bf"). InnerVolumeSpecName "openstack-edpm-ipam-telemetry-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 10:30:10 crc kubenswrapper[4730]: I0930 10:30:10.687951 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c191d318-9d8c-4aac-bbc9-371553bb29bf-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "c191d318-9d8c-4aac-bbc9-371553bb29bf" (UID: "c191d318-9d8c-4aac-bbc9-371553bb29bf"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:30:10 crc kubenswrapper[4730]: I0930 10:30:10.688251 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c191d318-9d8c-4aac-bbc9-371553bb29bf-openstack-edpm-ipam-ovn-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-ovn-default-certs-0") pod "c191d318-9d8c-4aac-bbc9-371553bb29bf" (UID: "c191d318-9d8c-4aac-bbc9-371553bb29bf"). InnerVolumeSpecName "openstack-edpm-ipam-ovn-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:30:10 crc kubenswrapper[4730]: I0930 10:30:10.688263 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c191d318-9d8c-4aac-bbc9-371553bb29bf-ceph" (OuterVolumeSpecName: "ceph") pod "c191d318-9d8c-4aac-bbc9-371553bb29bf" (UID: "c191d318-9d8c-4aac-bbc9-371553bb29bf"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:30:10 crc kubenswrapper[4730]: I0930 10:30:10.688305 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c191d318-9d8c-4aac-bbc9-371553bb29bf-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "c191d318-9d8c-4aac-bbc9-371553bb29bf" (UID: "c191d318-9d8c-4aac-bbc9-371553bb29bf"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:30:10 crc kubenswrapper[4730]: I0930 10:30:10.689768 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c191d318-9d8c-4aac-bbc9-371553bb29bf-openstack-edpm-ipam-neutron-metadata-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-neutron-metadata-default-certs-0") pod "c191d318-9d8c-4aac-bbc9-371553bb29bf" (UID: "c191d318-9d8c-4aac-bbc9-371553bb29bf"). InnerVolumeSpecName "openstack-edpm-ipam-neutron-metadata-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:30:10 crc kubenswrapper[4730]: I0930 10:30:10.690127 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c191d318-9d8c-4aac-bbc9-371553bb29bf-openstack-edpm-ipam-libvirt-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-libvirt-default-certs-0") pod "c191d318-9d8c-4aac-bbc9-371553bb29bf" (UID: "c191d318-9d8c-4aac-bbc9-371553bb29bf"). InnerVolumeSpecName "openstack-edpm-ipam-libvirt-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:30:10 crc kubenswrapper[4730]: I0930 10:30:10.690851 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c191d318-9d8c-4aac-bbc9-371553bb29bf-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "c191d318-9d8c-4aac-bbc9-371553bb29bf" (UID: "c191d318-9d8c-4aac-bbc9-371553bb29bf"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:30:10 crc kubenswrapper[4730]: I0930 10:30:10.691483 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c191d318-9d8c-4aac-bbc9-371553bb29bf-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "c191d318-9d8c-4aac-bbc9-371553bb29bf" (UID: "c191d318-9d8c-4aac-bbc9-371553bb29bf"). InnerVolumeSpecName "libvirt-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:30:10 crc kubenswrapper[4730]: I0930 10:30:10.691940 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c191d318-9d8c-4aac-bbc9-371553bb29bf-kube-api-access-lvhdh" (OuterVolumeSpecName: "kube-api-access-lvhdh") pod "c191d318-9d8c-4aac-bbc9-371553bb29bf" (UID: "c191d318-9d8c-4aac-bbc9-371553bb29bf"). InnerVolumeSpecName "kube-api-access-lvhdh". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:30:10 crc kubenswrapper[4730]: I0930 10:30:10.694496 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c191d318-9d8c-4aac-bbc9-371553bb29bf-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "c191d318-9d8c-4aac-bbc9-371553bb29bf" (UID: "c191d318-9d8c-4aac-bbc9-371553bb29bf"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:30:10 crc kubenswrapper[4730]: I0930 10:30:10.695107 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c191d318-9d8c-4aac-bbc9-371553bb29bf-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "c191d318-9d8c-4aac-bbc9-371553bb29bf" (UID: "c191d318-9d8c-4aac-bbc9-371553bb29bf"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:30:10 crc kubenswrapper[4730]: I0930 10:30:10.695362 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c191d318-9d8c-4aac-bbc9-371553bb29bf-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "c191d318-9d8c-4aac-bbc9-371553bb29bf" (UID: "c191d318-9d8c-4aac-bbc9-371553bb29bf"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:30:10 crc kubenswrapper[4730]: I0930 10:30:10.713250 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c191d318-9d8c-4aac-bbc9-371553bb29bf-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "c191d318-9d8c-4aac-bbc9-371553bb29bf" (UID: "c191d318-9d8c-4aac-bbc9-371553bb29bf"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:30:10 crc kubenswrapper[4730]: I0930 10:30:10.715429 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c191d318-9d8c-4aac-bbc9-371553bb29bf-inventory" (OuterVolumeSpecName: "inventory") pod "c191d318-9d8c-4aac-bbc9-371553bb29bf" (UID: "c191d318-9d8c-4aac-bbc9-371553bb29bf"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:30:10 crc kubenswrapper[4730]: I0930 10:30:10.784082 4730 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c191d318-9d8c-4aac-bbc9-371553bb29bf-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 10:30:10 crc kubenswrapper[4730]: I0930 10:30:10.784120 4730 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/c191d318-9d8c-4aac-bbc9-371553bb29bf-openstack-edpm-ipam-libvirt-default-certs-0\") on node \"crc\" DevicePath \"\"" Sep 30 10:30:10 crc kubenswrapper[4730]: I0930 10:30:10.784135 4730 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c191d318-9d8c-4aac-bbc9-371553bb29bf-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 10:30:10 crc kubenswrapper[4730]: I0930 10:30:10.784143 4730 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/c191d318-9d8c-4aac-bbc9-371553bb29bf-openstack-edpm-ipam-neutron-metadata-default-certs-0\") on node \"crc\" DevicePath \"\"" Sep 30 10:30:10 crc kubenswrapper[4730]: I0930 10:30:10.784155 4730 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c191d318-9d8c-4aac-bbc9-371553bb29bf-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 10:30:10 crc kubenswrapper[4730]: I0930 10:30:10.784164 4730 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c191d318-9d8c-4aac-bbc9-371553bb29bf-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 10:30:10 crc kubenswrapper[4730]: I0930 10:30:10.784172 4730 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c191d318-9d8c-4aac-bbc9-371553bb29bf-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 10:30:10 crc kubenswrapper[4730]: I0930 10:30:10.784181 4730 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c191d318-9d8c-4aac-bbc9-371553bb29bf-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 10:30:10 crc kubenswrapper[4730]: I0930 10:30:10.784274 4730 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c191d318-9d8c-4aac-bbc9-371553bb29bf-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 10:30:10 crc kubenswrapper[4730]: I0930 10:30:10.784290 4730 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c191d318-9d8c-4aac-bbc9-371553bb29bf-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 10:30:10 crc kubenswrapper[4730]: I0930 10:30:10.784299 4730 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/c191d318-9d8c-4aac-bbc9-371553bb29bf-openstack-edpm-ipam-telemetry-default-certs-0\") on node \"crc\" DevicePath \"\"" Sep 30 10:30:10 crc kubenswrapper[4730]: I0930 10:30:10.784307 4730 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: 
\"kubernetes.io/secret/c191d318-9d8c-4aac-bbc9-371553bb29bf-ceph\") on node \"crc\" DevicePath \"\"" Sep 30 10:30:10 crc kubenswrapper[4730]: I0930 10:30:10.784317 4730 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/c191d318-9d8c-4aac-bbc9-371553bb29bf-openstack-edpm-ipam-ovn-default-certs-0\") on node \"crc\" DevicePath \"\"" Sep 30 10:30:10 crc kubenswrapper[4730]: I0930 10:30:10.784326 4730 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c191d318-9d8c-4aac-bbc9-371553bb29bf-inventory\") on node \"crc\" DevicePath \"\"" Sep 30 10:30:10 crc kubenswrapper[4730]: I0930 10:30:10.784334 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lvhdh\" (UniqueName: \"kubernetes.io/projected/c191d318-9d8c-4aac-bbc9-371553bb29bf-kube-api-access-lvhdh\") on node \"crc\" DevicePath \"\"" Sep 30 10:30:11 crc kubenswrapper[4730]: I0930 10:30:11.149328 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-47s8l" event={"ID":"c191d318-9d8c-4aac-bbc9-371553bb29bf","Type":"ContainerDied","Data":"0b10bc911a750479d9f8ce38e6879eec6c07f9f2a708c3af201a66764e8571e4"} Sep 30 10:30:11 crc kubenswrapper[4730]: I0930 10:30:11.149367 4730 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0b10bc911a750479d9f8ce38e6879eec6c07f9f2a708c3af201a66764e8571e4" Sep 30 10:30:11 crc kubenswrapper[4730]: I0930 10:30:11.149426 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-47s8l" Sep 30 10:30:11 crc kubenswrapper[4730]: I0930 10:30:11.290437 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-dlnrw"] Sep 30 10:30:11 crc kubenswrapper[4730]: E0930 10:30:11.290811 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c191d318-9d8c-4aac-bbc9-371553bb29bf" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Sep 30 10:30:11 crc kubenswrapper[4730]: I0930 10:30:11.290830 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="c191d318-9d8c-4aac-bbc9-371553bb29bf" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Sep 30 10:30:11 crc kubenswrapper[4730]: E0930 10:30:11.290848 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d920f51a-11f4-408a-a55c-2a3ee16173da" containerName="collect-profiles" Sep 30 10:30:11 crc kubenswrapper[4730]: I0930 10:30:11.290855 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="d920f51a-11f4-408a-a55c-2a3ee16173da" containerName="collect-profiles" Sep 30 10:30:11 crc kubenswrapper[4730]: I0930 10:30:11.291030 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="c191d318-9d8c-4aac-bbc9-371553bb29bf" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Sep 30 10:30:11 crc kubenswrapper[4730]: I0930 10:30:11.291049 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="d920f51a-11f4-408a-a55c-2a3ee16173da" containerName="collect-profiles" Sep 30 10:30:11 crc kubenswrapper[4730]: I0930 10:30:11.291667 4730 util.go:30] "No sandbox for pod can be found. 
Sep 30 10:30:11 crc kubenswrapper[4730]: I0930 10:30:11.294548 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Sep 30 10:30:11 crc kubenswrapper[4730]: I0930 10:30:11.294645 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Sep 30 10:30:11 crc kubenswrapper[4730]: I0930 10:30:11.294653 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files"
Sep 30 10:30:11 crc kubenswrapper[4730]: I0930 10:30:11.294553 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-8vdlt"
Sep 30 10:30:11 crc kubenswrapper[4730]: I0930 10:30:11.294554 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Sep 30 10:30:11 crc kubenswrapper[4730]: I0930 10:30:11.307539 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-dlnrw"]
Sep 30 10:30:11 crc kubenswrapper[4730]: I0930 10:30:11.393713 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d0e46e4d-3e83-4381-b519-f840ffb1911b-inventory\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-dlnrw\" (UID: \"d0e46e4d-3e83-4381-b519-f840ffb1911b\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-dlnrw"
Sep 30 10:30:11 crc kubenswrapper[4730]: I0930 10:30:11.394147 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/d0e46e4d-3e83-4381-b519-f840ffb1911b-ceph\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-dlnrw\" (UID: \"d0e46e4d-3e83-4381-b519-f840ffb1911b\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-dlnrw"
Sep 30 10:30:11 crc kubenswrapper[4730]: I0930 10:30:11.394346 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d0e46e4d-3e83-4381-b519-f840ffb1911b-ssh-key\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-dlnrw\" (UID: \"d0e46e4d-3e83-4381-b519-f840ffb1911b\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-dlnrw"
Sep 30 10:30:11 crc kubenswrapper[4730]: I0930 10:30:11.394398 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r7c5q\" (UniqueName: \"kubernetes.io/projected/d0e46e4d-3e83-4381-b519-f840ffb1911b-kube-api-access-r7c5q\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-dlnrw\" (UID: \"d0e46e4d-3e83-4381-b519-f840ffb1911b\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-dlnrw"
Sep 30 10:30:11 crc kubenswrapper[4730]: I0930 10:30:11.496286 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d0e46e4d-3e83-4381-b519-f840ffb1911b-inventory\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-dlnrw\" (UID: \"d0e46e4d-3e83-4381-b519-f840ffb1911b\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-dlnrw"
Sep 30 10:30:11 crc kubenswrapper[4730]: I0930 10:30:11.496424 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/d0e46e4d-3e83-4381-b519-f840ffb1911b-ceph\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-dlnrw\" (UID: \"d0e46e4d-3e83-4381-b519-f840ffb1911b\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-dlnrw"
Sep 30 10:30:11 crc kubenswrapper[4730]: I0930 10:30:11.496564 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r7c5q\" (UniqueName: \"kubernetes.io/projected/d0e46e4d-3e83-4381-b519-f840ffb1911b-kube-api-access-r7c5q\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-dlnrw\" (UID: \"d0e46e4d-3e83-4381-b519-f840ffb1911b\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-dlnrw"
Sep 30 10:30:11 crc kubenswrapper[4730]: I0930 10:30:11.496596 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d0e46e4d-3e83-4381-b519-f840ffb1911b-ssh-key\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-dlnrw\" (UID: \"d0e46e4d-3e83-4381-b519-f840ffb1911b\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-dlnrw"
Sep 30 10:30:11 crc kubenswrapper[4730]: I0930 10:30:11.500567 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d0e46e4d-3e83-4381-b519-f840ffb1911b-ssh-key\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-dlnrw\" (UID: \"d0e46e4d-3e83-4381-b519-f840ffb1911b\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-dlnrw"
Sep 30 10:30:11 crc kubenswrapper[4730]: I0930 10:30:11.501044 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/d0e46e4d-3e83-4381-b519-f840ffb1911b-ceph\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-dlnrw\" (UID: \"d0e46e4d-3e83-4381-b519-f840ffb1911b\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-dlnrw"
Sep 30 10:30:11 crc kubenswrapper[4730]: I0930 10:30:11.515941 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d0e46e4d-3e83-4381-b519-f840ffb1911b-inventory\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-dlnrw\" (UID: \"d0e46e4d-3e83-4381-b519-f840ffb1911b\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-dlnrw"
Sep 30 10:30:11 crc kubenswrapper[4730]: I0930 10:30:11.520820 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r7c5q\" (UniqueName: \"kubernetes.io/projected/d0e46e4d-3e83-4381-b519-f840ffb1911b-kube-api-access-r7c5q\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-dlnrw\" (UID: \"d0e46e4d-3e83-4381-b519-f840ffb1911b\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-dlnrw"
Sep 30 10:30:11 crc kubenswrapper[4730]: I0930 10:30:11.609703 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-dlnrw"
Sep 30 10:30:12 crc kubenswrapper[4730]: I0930 10:30:12.166789 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-dlnrw"]
Sep 30 10:30:13 crc kubenswrapper[4730]: I0930 10:30:13.173745 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-dlnrw" event={"ID":"d0e46e4d-3e83-4381-b519-f840ffb1911b","Type":"ContainerStarted","Data":"5483e14f8ac37acb1d8785a855bce6f6bc3ecc6185be15189ae3b3954d9a3887"}
Sep 30 10:30:13 crc kubenswrapper[4730]: I0930 10:30:13.174037 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-dlnrw" event={"ID":"d0e46e4d-3e83-4381-b519-f840ffb1911b","Type":"ContainerStarted","Data":"795256b898dc30ec5664041721c7bba4dff4bcdad3234a69217788325d5ddded"}
Sep 30 10:30:13 crc kubenswrapper[4730]: I0930 10:30:13.201912 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-dlnrw" podStartSLOduration=1.5402023599999999 podStartE2EDuration="2.201894079s" podCreationTimestamp="2025-09-30 10:30:11 +0000 UTC" firstStartedPulling="2025-09-30 10:30:12.172602659 +0000 UTC m=+2456.505862652" lastFinishedPulling="2025-09-30 10:30:12.834294378 +0000 UTC m=+2457.167554371" observedRunningTime="2025-09-30 10:30:13.194100527 +0000 UTC m=+2457.527360530" watchObservedRunningTime="2025-09-30 10:30:13.201894079 +0000 UTC m=+2457.535154072"
Sep 30 10:30:16 crc kubenswrapper[4730]: I0930 10:30:16.641869 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-ftdsm"]
Sep 30 10:30:16 crc kubenswrapper[4730]: I0930 10:30:16.644985 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-ftdsm"
Sep 30 10:30:16 crc kubenswrapper[4730]: I0930 10:30:16.658361 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-ftdsm"]
Sep 30 10:30:16 crc kubenswrapper[4730]: I0930 10:30:16.695277 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6kcfq\" (UniqueName: \"kubernetes.io/projected/72ba1944-be78-4291-bf77-2ebb5c70f7a1-kube-api-access-6kcfq\") pod \"redhat-operators-ftdsm\" (UID: \"72ba1944-be78-4291-bf77-2ebb5c70f7a1\") " pod="openshift-marketplace/redhat-operators-ftdsm"
Sep 30 10:30:16 crc kubenswrapper[4730]: I0930 10:30:16.695340 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/72ba1944-be78-4291-bf77-2ebb5c70f7a1-catalog-content\") pod \"redhat-operators-ftdsm\" (UID: \"72ba1944-be78-4291-bf77-2ebb5c70f7a1\") " pod="openshift-marketplace/redhat-operators-ftdsm"
Sep 30 10:30:16 crc kubenswrapper[4730]: I0930 10:30:16.695478 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/72ba1944-be78-4291-bf77-2ebb5c70f7a1-utilities\") pod \"redhat-operators-ftdsm\" (UID: \"72ba1944-be78-4291-bf77-2ebb5c70f7a1\") " pod="openshift-marketplace/redhat-operators-ftdsm"
Sep 30 10:30:16 crc kubenswrapper[4730]: I0930 10:30:16.797672 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6kcfq\" (UniqueName: \"kubernetes.io/projected/72ba1944-be78-4291-bf77-2ebb5c70f7a1-kube-api-access-6kcfq\") pod \"redhat-operators-ftdsm\" (UID: \"72ba1944-be78-4291-bf77-2ebb5c70f7a1\") " pod="openshift-marketplace/redhat-operators-ftdsm"
Sep 30 10:30:16 crc kubenswrapper[4730]: I0930 10:30:16.797741 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/72ba1944-be78-4291-bf77-2ebb5c70f7a1-catalog-content\") pod \"redhat-operators-ftdsm\" (UID: \"72ba1944-be78-4291-bf77-2ebb5c70f7a1\") " pod="openshift-marketplace/redhat-operators-ftdsm"
Sep 30 10:30:16 crc kubenswrapper[4730]: I0930 10:30:16.797841 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/72ba1944-be78-4291-bf77-2ebb5c70f7a1-utilities\") pod \"redhat-operators-ftdsm\" (UID: \"72ba1944-be78-4291-bf77-2ebb5c70f7a1\") " pod="openshift-marketplace/redhat-operators-ftdsm"
Sep 30 10:30:16 crc kubenswrapper[4730]: I0930 10:30:16.798388 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/72ba1944-be78-4291-bf77-2ebb5c70f7a1-utilities\") pod \"redhat-operators-ftdsm\" (UID: \"72ba1944-be78-4291-bf77-2ebb5c70f7a1\") " pod="openshift-marketplace/redhat-operators-ftdsm"
Sep 30 10:30:16 crc kubenswrapper[4730]: I0930 10:30:16.798920 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/72ba1944-be78-4291-bf77-2ebb5c70f7a1-catalog-content\") pod \"redhat-operators-ftdsm\" (UID: \"72ba1944-be78-4291-bf77-2ebb5c70f7a1\") " pod="openshift-marketplace/redhat-operators-ftdsm"
Sep 30 10:30:16 crc kubenswrapper[4730]: I0930 10:30:16.819522 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6kcfq\" (UniqueName: \"kubernetes.io/projected/72ba1944-be78-4291-bf77-2ebb5c70f7a1-kube-api-access-6kcfq\") pod \"redhat-operators-ftdsm\" (UID: \"72ba1944-be78-4291-bf77-2ebb5c70f7a1\") " pod="openshift-marketplace/redhat-operators-ftdsm"
\"kube-api-access-6kcfq\" (UniqueName: \"kubernetes.io/projected/72ba1944-be78-4291-bf77-2ebb5c70f7a1-kube-api-access-6kcfq\") pod \"redhat-operators-ftdsm\" (UID: \"72ba1944-be78-4291-bf77-2ebb5c70f7a1\") " pod="openshift-marketplace/redhat-operators-ftdsm" Sep 30 10:30:16 crc kubenswrapper[4730]: I0930 10:30:16.964292 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-ftdsm" Sep 30 10:30:17 crc kubenswrapper[4730]: I0930 10:30:17.472214 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-ftdsm"] Sep 30 10:30:18 crc kubenswrapper[4730]: I0930 10:30:18.219162 4730 generic.go:334] "Generic (PLEG): container finished" podID="72ba1944-be78-4291-bf77-2ebb5c70f7a1" containerID="6285cc9bcb66e002fc4ffbbc80cd57825d367d1e4b2c088fe82521acba73a93d" exitCode=0 Sep 30 10:30:18 crc kubenswrapper[4730]: I0930 10:30:18.219324 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ftdsm" event={"ID":"72ba1944-be78-4291-bf77-2ebb5c70f7a1","Type":"ContainerDied","Data":"6285cc9bcb66e002fc4ffbbc80cd57825d367d1e4b2c088fe82521acba73a93d"} Sep 30 10:30:18 crc kubenswrapper[4730]: I0930 10:30:18.219503 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ftdsm" event={"ID":"72ba1944-be78-4291-bf77-2ebb5c70f7a1","Type":"ContainerStarted","Data":"e63b22565e2e9641afa5c112c94219602960350b20d7e8293423e98bf2f672ae"} Sep 30 10:30:19 crc kubenswrapper[4730]: I0930 10:30:19.230567 4730 generic.go:334] "Generic (PLEG): container finished" podID="d0e46e4d-3e83-4381-b519-f840ffb1911b" containerID="5483e14f8ac37acb1d8785a855bce6f6bc3ecc6185be15189ae3b3954d9a3887" exitCode=0 Sep 30 10:30:19 crc kubenswrapper[4730]: I0930 10:30:19.230628 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-dlnrw" event={"ID":"d0e46e4d-3e83-4381-b519-f840ffb1911b","Type":"ContainerDied","Data":"5483e14f8ac37acb1d8785a855bce6f6bc3ecc6185be15189ae3b3954d9a3887"} Sep 30 10:30:20 crc kubenswrapper[4730]: I0930 10:30:20.245115 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ftdsm" event={"ID":"72ba1944-be78-4291-bf77-2ebb5c70f7a1","Type":"ContainerStarted","Data":"544180309cc2875cc260e5514de4c45decd3d7c8339739db026840f4dd05c324"} Sep 30 10:30:20 crc kubenswrapper[4730]: I0930 10:30:20.663088 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-dlnrw" Sep 30 10:30:20 crc kubenswrapper[4730]: I0930 10:30:20.786894 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/d0e46e4d-3e83-4381-b519-f840ffb1911b-ceph\") pod \"d0e46e4d-3e83-4381-b519-f840ffb1911b\" (UID: \"d0e46e4d-3e83-4381-b519-f840ffb1911b\") " Sep 30 10:30:20 crc kubenswrapper[4730]: I0930 10:30:20.787067 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d0e46e4d-3e83-4381-b519-f840ffb1911b-ssh-key\") pod \"d0e46e4d-3e83-4381-b519-f840ffb1911b\" (UID: \"d0e46e4d-3e83-4381-b519-f840ffb1911b\") " Sep 30 10:30:20 crc kubenswrapper[4730]: I0930 10:30:20.787111 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d0e46e4d-3e83-4381-b519-f840ffb1911b-inventory\") pod \"d0e46e4d-3e83-4381-b519-f840ffb1911b\" (UID: \"d0e46e4d-3e83-4381-b519-f840ffb1911b\") " Sep 30 10:30:20 crc kubenswrapper[4730]: I0930 10:30:20.787154 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r7c5q\" (UniqueName: \"kubernetes.io/projected/d0e46e4d-3e83-4381-b519-f840ffb1911b-kube-api-access-r7c5q\") pod \"d0e46e4d-3e83-4381-b519-f840ffb1911b\" (UID: \"d0e46e4d-3e83-4381-b519-f840ffb1911b\") " Sep 30 10:30:20 crc kubenswrapper[4730]: I0930 10:30:20.792850 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d0e46e4d-3e83-4381-b519-f840ffb1911b-ceph" (OuterVolumeSpecName: "ceph") pod "d0e46e4d-3e83-4381-b519-f840ffb1911b" (UID: "d0e46e4d-3e83-4381-b519-f840ffb1911b"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:30:20 crc kubenswrapper[4730]: I0930 10:30:20.797830 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d0e46e4d-3e83-4381-b519-f840ffb1911b-kube-api-access-r7c5q" (OuterVolumeSpecName: "kube-api-access-r7c5q") pod "d0e46e4d-3e83-4381-b519-f840ffb1911b" (UID: "d0e46e4d-3e83-4381-b519-f840ffb1911b"). InnerVolumeSpecName "kube-api-access-r7c5q". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:30:20 crc kubenswrapper[4730]: I0930 10:30:20.819153 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d0e46e4d-3e83-4381-b519-f840ffb1911b-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "d0e46e4d-3e83-4381-b519-f840ffb1911b" (UID: "d0e46e4d-3e83-4381-b519-f840ffb1911b"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:30:20 crc kubenswrapper[4730]: I0930 10:30:20.819228 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d0e46e4d-3e83-4381-b519-f840ffb1911b-inventory" (OuterVolumeSpecName: "inventory") pod "d0e46e4d-3e83-4381-b519-f840ffb1911b" (UID: "d0e46e4d-3e83-4381-b519-f840ffb1911b"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:30:20 crc kubenswrapper[4730]: I0930 10:30:20.889178 4730 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/d0e46e4d-3e83-4381-b519-f840ffb1911b-ceph\") on node \"crc\" DevicePath \"\"" Sep 30 10:30:20 crc kubenswrapper[4730]: I0930 10:30:20.889219 4730 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d0e46e4d-3e83-4381-b519-f840ffb1911b-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 10:30:20 crc kubenswrapper[4730]: I0930 10:30:20.889230 4730 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d0e46e4d-3e83-4381-b519-f840ffb1911b-inventory\") on node \"crc\" DevicePath \"\"" Sep 30 10:30:20 crc kubenswrapper[4730]: I0930 10:30:20.889240 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r7c5q\" (UniqueName: \"kubernetes.io/projected/d0e46e4d-3e83-4381-b519-f840ffb1911b-kube-api-access-r7c5q\") on node \"crc\" DevicePath \"\"" Sep 30 10:30:21 crc kubenswrapper[4730]: I0930 10:30:21.254555 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-dlnrw" Sep 30 10:30:21 crc kubenswrapper[4730]: I0930 10:30:21.254554 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-dlnrw" event={"ID":"d0e46e4d-3e83-4381-b519-f840ffb1911b","Type":"ContainerDied","Data":"795256b898dc30ec5664041721c7bba4dff4bcdad3234a69217788325d5ddded"} Sep 30 10:30:21 crc kubenswrapper[4730]: I0930 10:30:21.255352 4730 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="795256b898dc30ec5664041721c7bba4dff4bcdad3234a69217788325d5ddded" Sep 30 10:30:21 crc kubenswrapper[4730]: I0930 10:30:21.257558 4730 generic.go:334] "Generic (PLEG): container finished" podID="72ba1944-be78-4291-bf77-2ebb5c70f7a1" containerID="544180309cc2875cc260e5514de4c45decd3d7c8339739db026840f4dd05c324" exitCode=0 Sep 30 10:30:21 crc kubenswrapper[4730]: I0930 10:30:21.257660 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ftdsm" event={"ID":"72ba1944-be78-4291-bf77-2ebb5c70f7a1","Type":"ContainerDied","Data":"544180309cc2875cc260e5514de4c45decd3d7c8339739db026840f4dd05c324"} Sep 30 10:30:21 crc kubenswrapper[4730]: I0930 10:30:21.317223 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-br6q6"] Sep 30 10:30:21 crc kubenswrapper[4730]: E0930 10:30:21.318056 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d0e46e4d-3e83-4381-b519-f840ffb1911b" containerName="ceph-client-edpm-deployment-openstack-edpm-ipam" Sep 30 10:30:21 crc kubenswrapper[4730]: I0930 10:30:21.318122 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="d0e46e4d-3e83-4381-b519-f840ffb1911b" containerName="ceph-client-edpm-deployment-openstack-edpm-ipam" Sep 30 10:30:21 crc kubenswrapper[4730]: I0930 10:30:21.318376 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="d0e46e4d-3e83-4381-b519-f840ffb1911b" containerName="ceph-client-edpm-deployment-openstack-edpm-ipam" Sep 30 10:30:21 crc kubenswrapper[4730]: I0930 10:30:21.319077 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-br6q6" Sep 30 10:30:21 crc kubenswrapper[4730]: I0930 10:30:21.321301 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Sep 30 10:30:21 crc kubenswrapper[4730]: I0930 10:30:21.326250 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 10:30:21 crc kubenswrapper[4730]: I0930 10:30:21.327945 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-8vdlt" Sep 30 10:30:21 crc kubenswrapper[4730]: I0930 10:30:21.327994 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-config" Sep 30 10:30:21 crc kubenswrapper[4730]: I0930 10:30:21.328030 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 30 10:30:21 crc kubenswrapper[4730]: I0930 10:30:21.328269 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 30 10:30:21 crc kubenswrapper[4730]: I0930 10:30:21.329778 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-br6q6"] Sep 30 10:30:21 crc kubenswrapper[4730]: I0930 10:30:21.397036 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/ae1fcc30-65e0-4f9d-9ffa-bd87c1effd02-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-br6q6\" (UID: \"ae1fcc30-65e0-4f9d-9ffa-bd87c1effd02\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-br6q6" Sep 30 10:30:21 crc kubenswrapper[4730]: I0930 10:30:21.397085 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ae1fcc30-65e0-4f9d-9ffa-bd87c1effd02-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-br6q6\" (UID: \"ae1fcc30-65e0-4f9d-9ffa-bd87c1effd02\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-br6q6" Sep 30 10:30:21 crc kubenswrapper[4730]: I0930 10:30:21.397110 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae1fcc30-65e0-4f9d-9ffa-bd87c1effd02-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-br6q6\" (UID: \"ae1fcc30-65e0-4f9d-9ffa-bd87c1effd02\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-br6q6" Sep 30 10:30:21 crc kubenswrapper[4730]: I0930 10:30:21.397129 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/ae1fcc30-65e0-4f9d-9ffa-bd87c1effd02-ceph\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-br6q6\" (UID: \"ae1fcc30-65e0-4f9d-9ffa-bd87c1effd02\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-br6q6" Sep 30 10:30:21 crc kubenswrapper[4730]: I0930 10:30:21.397348 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ae1fcc30-65e0-4f9d-9ffa-bd87c1effd02-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-br6q6\" (UID: \"ae1fcc30-65e0-4f9d-9ffa-bd87c1effd02\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-br6q6" Sep 30 10:30:21 crc kubenswrapper[4730]: I0930 
Sep 30 10:30:21 crc kubenswrapper[4730]: I0930 10:30:21.499519 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mgpmn\" (UniqueName: \"kubernetes.io/projected/ae1fcc30-65e0-4f9d-9ffa-bd87c1effd02-kube-api-access-mgpmn\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-br6q6\" (UID: \"ae1fcc30-65e0-4f9d-9ffa-bd87c1effd02\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-br6q6"
Sep 30 10:30:21 crc kubenswrapper[4730]: I0930 10:30:21.499657 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/ae1fcc30-65e0-4f9d-9ffa-bd87c1effd02-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-br6q6\" (UID: \"ae1fcc30-65e0-4f9d-9ffa-bd87c1effd02\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-br6q6"
Sep 30 10:30:21 crc kubenswrapper[4730]: I0930 10:30:21.499684 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ae1fcc30-65e0-4f9d-9ffa-bd87c1effd02-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-br6q6\" (UID: \"ae1fcc30-65e0-4f9d-9ffa-bd87c1effd02\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-br6q6"
Sep 30 10:30:21 crc kubenswrapper[4730]: I0930 10:30:21.499715 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae1fcc30-65e0-4f9d-9ffa-bd87c1effd02-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-br6q6\" (UID: \"ae1fcc30-65e0-4f9d-9ffa-bd87c1effd02\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-br6q6"
Sep 30 10:30:21 crc kubenswrapper[4730]: I0930 10:30:21.499740 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/ae1fcc30-65e0-4f9d-9ffa-bd87c1effd02-ceph\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-br6q6\" (UID: \"ae1fcc30-65e0-4f9d-9ffa-bd87c1effd02\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-br6q6"
Sep 30 10:30:21 crc kubenswrapper[4730]: I0930 10:30:21.499814 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ae1fcc30-65e0-4f9d-9ffa-bd87c1effd02-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-br6q6\" (UID: \"ae1fcc30-65e0-4f9d-9ffa-bd87c1effd02\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-br6q6"
Sep 30 10:30:21 crc kubenswrapper[4730]: I0930 10:30:21.501484 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/ae1fcc30-65e0-4f9d-9ffa-bd87c1effd02-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-br6q6\" (UID: \"ae1fcc30-65e0-4f9d-9ffa-bd87c1effd02\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-br6q6"
Sep 30 10:30:21 crc kubenswrapper[4730]: I0930 10:30:21.505845 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/ae1fcc30-65e0-4f9d-9ffa-bd87c1effd02-ceph\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-br6q6\" (UID: \"ae1fcc30-65e0-4f9d-9ffa-bd87c1effd02\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-br6q6"
Sep 30 10:30:21 crc kubenswrapper[4730]: I0930 10:30:21.505948 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ae1fcc30-65e0-4f9d-9ffa-bd87c1effd02-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-br6q6\" (UID: \"ae1fcc30-65e0-4f9d-9ffa-bd87c1effd02\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-br6q6"
Sep 30 10:30:21 crc kubenswrapper[4730]: I0930 10:30:21.506138 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ae1fcc30-65e0-4f9d-9ffa-bd87c1effd02-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-br6q6\" (UID: \"ae1fcc30-65e0-4f9d-9ffa-bd87c1effd02\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-br6q6"
Sep 30 10:30:21 crc kubenswrapper[4730]: I0930 10:30:21.506283 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae1fcc30-65e0-4f9d-9ffa-bd87c1effd02-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-br6q6\" (UID: \"ae1fcc30-65e0-4f9d-9ffa-bd87c1effd02\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-br6q6"
Sep 30 10:30:21 crc kubenswrapper[4730]: I0930 10:30:21.518267 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mgpmn\" (UniqueName: \"kubernetes.io/projected/ae1fcc30-65e0-4f9d-9ffa-bd87c1effd02-kube-api-access-mgpmn\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-br6q6\" (UID: \"ae1fcc30-65e0-4f9d-9ffa-bd87c1effd02\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-br6q6"
Sep 30 10:30:21 crc kubenswrapper[4730]: I0930 10:30:21.645014 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-br6q6"
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-br6q6" Sep 30 10:30:22 crc kubenswrapper[4730]: I0930 10:30:22.147727 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-br6q6"] Sep 30 10:30:22 crc kubenswrapper[4730]: W0930 10:30:22.149857 4730 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podae1fcc30_65e0_4f9d_9ffa_bd87c1effd02.slice/crio-224b4fd9a282c31db9f3365177049eadbba6b8b25cf7ee4dbb27af15ea011b73 WatchSource:0}: Error finding container 224b4fd9a282c31db9f3365177049eadbba6b8b25cf7ee4dbb27af15ea011b73: Status 404 returned error can't find the container with id 224b4fd9a282c31db9f3365177049eadbba6b8b25cf7ee4dbb27af15ea011b73 Sep 30 10:30:22 crc kubenswrapper[4730]: I0930 10:30:22.269081 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-br6q6" event={"ID":"ae1fcc30-65e0-4f9d-9ffa-bd87c1effd02","Type":"ContainerStarted","Data":"224b4fd9a282c31db9f3365177049eadbba6b8b25cf7ee4dbb27af15ea011b73"} Sep 30 10:30:22 crc kubenswrapper[4730]: I0930 10:30:22.270926 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ftdsm" event={"ID":"72ba1944-be78-4291-bf77-2ebb5c70f7a1","Type":"ContainerStarted","Data":"f2addf6c26fb6d89942bf2e0d19d7bb7837042e138c630ada18cc68be78817b0"} Sep 30 10:30:22 crc kubenswrapper[4730]: I0930 10:30:22.297510 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-ftdsm" podStartSLOduration=2.745343541 podStartE2EDuration="6.297486214s" podCreationTimestamp="2025-09-30 10:30:16 +0000 UTC" firstStartedPulling="2025-09-30 10:30:18.222352897 +0000 UTC m=+2462.555612890" lastFinishedPulling="2025-09-30 10:30:21.77449557 +0000 UTC m=+2466.107755563" observedRunningTime="2025-09-30 10:30:22.294083154 +0000 UTC m=+2466.627343147" watchObservedRunningTime="2025-09-30 10:30:22.297486214 +0000 UTC m=+2466.630746207" Sep 30 10:30:23 crc kubenswrapper[4730]: I0930 10:30:23.284702 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-br6q6" event={"ID":"ae1fcc30-65e0-4f9d-9ffa-bd87c1effd02","Type":"ContainerStarted","Data":"e04dceccf9c542634d76f1b1214f817f63e681a27a96c5374d856ff0781333e3"} Sep 30 10:30:23 crc kubenswrapper[4730]: I0930 10:30:23.316222 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-br6q6" podStartSLOduration=1.8762830799999999 podStartE2EDuration="2.316197588s" podCreationTimestamp="2025-09-30 10:30:21 +0000 UTC" firstStartedPulling="2025-09-30 10:30:22.15314474 +0000 UTC m=+2466.486404733" lastFinishedPulling="2025-09-30 10:30:22.593059248 +0000 UTC m=+2466.926319241" observedRunningTime="2025-09-30 10:30:23.308769164 +0000 UTC m=+2467.642029167" watchObservedRunningTime="2025-09-30 10:30:23.316197588 +0000 UTC m=+2467.649457581" Sep 30 10:30:24 crc kubenswrapper[4730]: I0930 10:30:24.381029 4730 scope.go:117] "RemoveContainer" containerID="9f6faea1f020203ed696b478f58ffd89607071ce35adf31ee047b8ef2439d347" Sep 30 10:30:24 crc kubenswrapper[4730]: E0930 10:30:24.381286 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
Sep 30 10:30:24 crc kubenswrapper[4730]: I0930 10:30:24.644696 4730 scope.go:117] "RemoveContainer" containerID="e94bbc773fe8bfcdb493a02ccda8b8ce059c6ce0e8368296e2b13dd81a7d5f05"
Sep 30 10:30:26 crc kubenswrapper[4730]: I0930 10:30:26.965456 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-ftdsm"
Sep 30 10:30:26 crc kubenswrapper[4730]: I0930 10:30:26.966963 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-ftdsm"
Sep 30 10:30:27 crc kubenswrapper[4730]: I0930 10:30:27.019636 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-ftdsm"
Sep 30 10:30:27 crc kubenswrapper[4730]: I0930 10:30:27.381701 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-ftdsm"
Sep 30 10:30:27 crc kubenswrapper[4730]: I0930 10:30:27.628088 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-ftdsm"]
Sep 30 10:30:29 crc kubenswrapper[4730]: I0930 10:30:29.353354 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-ftdsm" podUID="72ba1944-be78-4291-bf77-2ebb5c70f7a1" containerName="registry-server" containerID="cri-o://f2addf6c26fb6d89942bf2e0d19d7bb7837042e138c630ada18cc68be78817b0" gracePeriod=2
Sep 30 10:30:29 crc kubenswrapper[4730]: I0930 10:30:29.826038 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-ftdsm"
Sep 30 10:30:29 crc kubenswrapper[4730]: I0930 10:30:29.898789 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/72ba1944-be78-4291-bf77-2ebb5c70f7a1-utilities\") pod \"72ba1944-be78-4291-bf77-2ebb5c70f7a1\" (UID: \"72ba1944-be78-4291-bf77-2ebb5c70f7a1\") "
Sep 30 10:30:29 crc kubenswrapper[4730]: I0930 10:30:29.899343 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/72ba1944-be78-4291-bf77-2ebb5c70f7a1-catalog-content\") pod \"72ba1944-be78-4291-bf77-2ebb5c70f7a1\" (UID: \"72ba1944-be78-4291-bf77-2ebb5c70f7a1\") "
Sep 30 10:30:29 crc kubenswrapper[4730]: I0930 10:30:29.899424 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6kcfq\" (UniqueName: \"kubernetes.io/projected/72ba1944-be78-4291-bf77-2ebb5c70f7a1-kube-api-access-6kcfq\") pod \"72ba1944-be78-4291-bf77-2ebb5c70f7a1\" (UID: \"72ba1944-be78-4291-bf77-2ebb5c70f7a1\") "
Sep 30 10:30:29 crc kubenswrapper[4730]: I0930 10:30:29.900498 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/72ba1944-be78-4291-bf77-2ebb5c70f7a1-utilities" (OuterVolumeSpecName: "utilities") pod "72ba1944-be78-4291-bf77-2ebb5c70f7a1" (UID: "72ba1944-be78-4291-bf77-2ebb5c70f7a1"). InnerVolumeSpecName "utilities".
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 10:30:29 crc kubenswrapper[4730]: I0930 10:30:29.906186 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/72ba1944-be78-4291-bf77-2ebb5c70f7a1-kube-api-access-6kcfq" (OuterVolumeSpecName: "kube-api-access-6kcfq") pod "72ba1944-be78-4291-bf77-2ebb5c70f7a1" (UID: "72ba1944-be78-4291-bf77-2ebb5c70f7a1"). InnerVolumeSpecName "kube-api-access-6kcfq". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:30:29 crc kubenswrapper[4730]: I0930 10:30:29.979701 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/72ba1944-be78-4291-bf77-2ebb5c70f7a1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "72ba1944-be78-4291-bf77-2ebb5c70f7a1" (UID: "72ba1944-be78-4291-bf77-2ebb5c70f7a1"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 10:30:30 crc kubenswrapper[4730]: I0930 10:30:30.001957 4730 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/72ba1944-be78-4291-bf77-2ebb5c70f7a1-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 10:30:30 crc kubenswrapper[4730]: I0930 10:30:30.002008 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6kcfq\" (UniqueName: \"kubernetes.io/projected/72ba1944-be78-4291-bf77-2ebb5c70f7a1-kube-api-access-6kcfq\") on node \"crc\" DevicePath \"\"" Sep 30 10:30:30 crc kubenswrapper[4730]: I0930 10:30:30.002023 4730 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/72ba1944-be78-4291-bf77-2ebb5c70f7a1-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 10:30:30 crc kubenswrapper[4730]: I0930 10:30:30.364351 4730 generic.go:334] "Generic (PLEG): container finished" podID="72ba1944-be78-4291-bf77-2ebb5c70f7a1" containerID="f2addf6c26fb6d89942bf2e0d19d7bb7837042e138c630ada18cc68be78817b0" exitCode=0 Sep 30 10:30:30 crc kubenswrapper[4730]: I0930 10:30:30.364399 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-ftdsm" Sep 30 10:30:30 crc kubenswrapper[4730]: I0930 10:30:30.364402 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ftdsm" event={"ID":"72ba1944-be78-4291-bf77-2ebb5c70f7a1","Type":"ContainerDied","Data":"f2addf6c26fb6d89942bf2e0d19d7bb7837042e138c630ada18cc68be78817b0"} Sep 30 10:30:30 crc kubenswrapper[4730]: I0930 10:30:30.364433 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ftdsm" event={"ID":"72ba1944-be78-4291-bf77-2ebb5c70f7a1","Type":"ContainerDied","Data":"e63b22565e2e9641afa5c112c94219602960350b20d7e8293423e98bf2f672ae"} Sep 30 10:30:30 crc kubenswrapper[4730]: I0930 10:30:30.364452 4730 scope.go:117] "RemoveContainer" containerID="f2addf6c26fb6d89942bf2e0d19d7bb7837042e138c630ada18cc68be78817b0" Sep 30 10:30:30 crc kubenswrapper[4730]: I0930 10:30:30.386833 4730 scope.go:117] "RemoveContainer" containerID="544180309cc2875cc260e5514de4c45decd3d7c8339739db026840f4dd05c324" Sep 30 10:30:30 crc kubenswrapper[4730]: I0930 10:30:30.406184 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-ftdsm"] Sep 30 10:30:30 crc kubenswrapper[4730]: I0930 10:30:30.414251 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-ftdsm"] Sep 30 10:30:30 crc kubenswrapper[4730]: I0930 10:30:30.429061 4730 scope.go:117] "RemoveContainer" containerID="6285cc9bcb66e002fc4ffbbc80cd57825d367d1e4b2c088fe82521acba73a93d" Sep 30 10:30:30 crc kubenswrapper[4730]: I0930 10:30:30.458801 4730 scope.go:117] "RemoveContainer" containerID="f2addf6c26fb6d89942bf2e0d19d7bb7837042e138c630ada18cc68be78817b0" Sep 30 10:30:30 crc kubenswrapper[4730]: E0930 10:30:30.461423 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f2addf6c26fb6d89942bf2e0d19d7bb7837042e138c630ada18cc68be78817b0\": container with ID starting with f2addf6c26fb6d89942bf2e0d19d7bb7837042e138c630ada18cc68be78817b0 not found: ID does not exist" containerID="f2addf6c26fb6d89942bf2e0d19d7bb7837042e138c630ada18cc68be78817b0" Sep 30 10:30:30 crc kubenswrapper[4730]: I0930 10:30:30.461564 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f2addf6c26fb6d89942bf2e0d19d7bb7837042e138c630ada18cc68be78817b0"} err="failed to get container status \"f2addf6c26fb6d89942bf2e0d19d7bb7837042e138c630ada18cc68be78817b0\": rpc error: code = NotFound desc = could not find container \"f2addf6c26fb6d89942bf2e0d19d7bb7837042e138c630ada18cc68be78817b0\": container with ID starting with f2addf6c26fb6d89942bf2e0d19d7bb7837042e138c630ada18cc68be78817b0 not found: ID does not exist" Sep 30 10:30:30 crc kubenswrapper[4730]: I0930 10:30:30.461691 4730 scope.go:117] "RemoveContainer" containerID="544180309cc2875cc260e5514de4c45decd3d7c8339739db026840f4dd05c324" Sep 30 10:30:30 crc kubenswrapper[4730]: E0930 10:30:30.462068 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"544180309cc2875cc260e5514de4c45decd3d7c8339739db026840f4dd05c324\": container with ID starting with 544180309cc2875cc260e5514de4c45decd3d7c8339739db026840f4dd05c324 not found: ID does not exist" containerID="544180309cc2875cc260e5514de4c45decd3d7c8339739db026840f4dd05c324" Sep 30 10:30:30 crc kubenswrapper[4730]: I0930 10:30:30.462195 4730 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"544180309cc2875cc260e5514de4c45decd3d7c8339739db026840f4dd05c324"} err="failed to get container status \"544180309cc2875cc260e5514de4c45decd3d7c8339739db026840f4dd05c324\": rpc error: code = NotFound desc = could not find container \"544180309cc2875cc260e5514de4c45decd3d7c8339739db026840f4dd05c324\": container with ID starting with 544180309cc2875cc260e5514de4c45decd3d7c8339739db026840f4dd05c324 not found: ID does not exist" Sep 30 10:30:30 crc kubenswrapper[4730]: I0930 10:30:30.462281 4730 scope.go:117] "RemoveContainer" containerID="6285cc9bcb66e002fc4ffbbc80cd57825d367d1e4b2c088fe82521acba73a93d" Sep 30 10:30:30 crc kubenswrapper[4730]: E0930 10:30:30.462662 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6285cc9bcb66e002fc4ffbbc80cd57825d367d1e4b2c088fe82521acba73a93d\": container with ID starting with 6285cc9bcb66e002fc4ffbbc80cd57825d367d1e4b2c088fe82521acba73a93d not found: ID does not exist" containerID="6285cc9bcb66e002fc4ffbbc80cd57825d367d1e4b2c088fe82521acba73a93d" Sep 30 10:30:30 crc kubenswrapper[4730]: I0930 10:30:30.462751 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6285cc9bcb66e002fc4ffbbc80cd57825d367d1e4b2c088fe82521acba73a93d"} err="failed to get container status \"6285cc9bcb66e002fc4ffbbc80cd57825d367d1e4b2c088fe82521acba73a93d\": rpc error: code = NotFound desc = could not find container \"6285cc9bcb66e002fc4ffbbc80cd57825d367d1e4b2c088fe82521acba73a93d\": container with ID starting with 6285cc9bcb66e002fc4ffbbc80cd57825d367d1e4b2c088fe82521acba73a93d not found: ID does not exist" Sep 30 10:30:32 crc kubenswrapper[4730]: I0930 10:30:32.394184 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="72ba1944-be78-4291-bf77-2ebb5c70f7a1" path="/var/lib/kubelet/pods/72ba1944-be78-4291-bf77-2ebb5c70f7a1/volumes" Sep 30 10:30:38 crc kubenswrapper[4730]: I0930 10:30:38.380794 4730 scope.go:117] "RemoveContainer" containerID="9f6faea1f020203ed696b478f58ffd89607071ce35adf31ee047b8ef2439d347" Sep 30 10:30:38 crc kubenswrapper[4730]: E0930 10:30:38.382500 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 10:30:51 crc kubenswrapper[4730]: I0930 10:30:51.381268 4730 scope.go:117] "RemoveContainer" containerID="9f6faea1f020203ed696b478f58ffd89607071ce35adf31ee047b8ef2439d347" Sep 30 10:30:51 crc kubenswrapper[4730]: E0930 10:30:51.382016 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 10:31:04 crc kubenswrapper[4730]: I0930 10:31:04.382714 4730 scope.go:117] "RemoveContainer" containerID="9f6faea1f020203ed696b478f58ffd89607071ce35adf31ee047b8ef2439d347" Sep 30 10:31:04 crc 
Sep 30 10:31:04 crc kubenswrapper[4730]: I0930 10:31:04.654054 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" event={"ID":"95bd4436-8399-478d-9552-c9ba5ae8f327","Type":"ContainerStarted","Data":"fca465fdb97ac9d9a109d35ebeee9e761de6e92d75613c1d72ae65f001b89f7b"}
Sep 30 10:31:42 crc kubenswrapper[4730]: I0930 10:31:42.017202 4730 generic.go:334] "Generic (PLEG): container finished" podID="ae1fcc30-65e0-4f9d-9ffa-bd87c1effd02" containerID="e04dceccf9c542634d76f1b1214f817f63e681a27a96c5374d856ff0781333e3" exitCode=0
Sep 30 10:31:42 crc kubenswrapper[4730]: I0930 10:31:42.017274 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-br6q6" event={"ID":"ae1fcc30-65e0-4f9d-9ffa-bd87c1effd02","Type":"ContainerDied","Data":"e04dceccf9c542634d76f1b1214f817f63e681a27a96c5374d856ff0781333e3"}
Sep 30 10:31:43 crc kubenswrapper[4730]: I0930 10:31:43.485500 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-br6q6"
Sep 30 10:31:43 crc kubenswrapper[4730]: I0930 10:31:43.579649 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae1fcc30-65e0-4f9d-9ffa-bd87c1effd02-ovn-combined-ca-bundle\") pod \"ae1fcc30-65e0-4f9d-9ffa-bd87c1effd02\" (UID: \"ae1fcc30-65e0-4f9d-9ffa-bd87c1effd02\") "
Sep 30 10:31:43 crc kubenswrapper[4730]: I0930 10:31:43.579824 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/ae1fcc30-65e0-4f9d-9ffa-bd87c1effd02-ceph\") pod \"ae1fcc30-65e0-4f9d-9ffa-bd87c1effd02\" (UID: \"ae1fcc30-65e0-4f9d-9ffa-bd87c1effd02\") "
Sep 30 10:31:43 crc kubenswrapper[4730]: I0930 10:31:43.580034 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mgpmn\" (UniqueName: \"kubernetes.io/projected/ae1fcc30-65e0-4f9d-9ffa-bd87c1effd02-kube-api-access-mgpmn\") pod \"ae1fcc30-65e0-4f9d-9ffa-bd87c1effd02\" (UID: \"ae1fcc30-65e0-4f9d-9ffa-bd87c1effd02\") "
Sep 30 10:31:43 crc kubenswrapper[4730]: I0930 10:31:43.580088 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ae1fcc30-65e0-4f9d-9ffa-bd87c1effd02-ssh-key\") pod \"ae1fcc30-65e0-4f9d-9ffa-bd87c1effd02\" (UID: \"ae1fcc30-65e0-4f9d-9ffa-bd87c1effd02\") "
Sep 30 10:31:43 crc kubenswrapper[4730]: I0930 10:31:43.580128 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/ae1fcc30-65e0-4f9d-9ffa-bd87c1effd02-ovncontroller-config-0\") pod \"ae1fcc30-65e0-4f9d-9ffa-bd87c1effd02\" (UID: \"ae1fcc30-65e0-4f9d-9ffa-bd87c1effd02\") "
Sep 30 10:31:43 crc kubenswrapper[4730]: I0930 10:31:43.580170 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ae1fcc30-65e0-4f9d-9ffa-bd87c1effd02-inventory\") pod \"ae1fcc30-65e0-4f9d-9ffa-bd87c1effd02\" (UID: \"ae1fcc30-65e0-4f9d-9ffa-bd87c1effd02\") "
Sep 30 10:31:43 crc kubenswrapper[4730]: I0930 10:31:43.591147 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ae1fcc30-65e0-4f9d-9ffa-bd87c1effd02-ceph" (OuterVolumeSpecName: "ceph") pod "ae1fcc30-65e0-4f9d-9ffa-bd87c1effd02" (UID: "ae1fcc30-65e0-4f9d-9ffa-bd87c1effd02").
InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:31:43 crc kubenswrapper[4730]: I0930 10:31:43.593810 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ae1fcc30-65e0-4f9d-9ffa-bd87c1effd02-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "ae1fcc30-65e0-4f9d-9ffa-bd87c1effd02" (UID: "ae1fcc30-65e0-4f9d-9ffa-bd87c1effd02"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:31:43 crc kubenswrapper[4730]: I0930 10:31:43.619672 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ae1fcc30-65e0-4f9d-9ffa-bd87c1effd02-inventory" (OuterVolumeSpecName: "inventory") pod "ae1fcc30-65e0-4f9d-9ffa-bd87c1effd02" (UID: "ae1fcc30-65e0-4f9d-9ffa-bd87c1effd02"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:31:43 crc kubenswrapper[4730]: I0930 10:31:43.621072 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ae1fcc30-65e0-4f9d-9ffa-bd87c1effd02-kube-api-access-mgpmn" (OuterVolumeSpecName: "kube-api-access-mgpmn") pod "ae1fcc30-65e0-4f9d-9ffa-bd87c1effd02" (UID: "ae1fcc30-65e0-4f9d-9ffa-bd87c1effd02"). InnerVolumeSpecName "kube-api-access-mgpmn". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:31:43 crc kubenswrapper[4730]: I0930 10:31:43.621136 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ae1fcc30-65e0-4f9d-9ffa-bd87c1effd02-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "ae1fcc30-65e0-4f9d-9ffa-bd87c1effd02" (UID: "ae1fcc30-65e0-4f9d-9ffa-bd87c1effd02"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:31:43 crc kubenswrapper[4730]: I0930 10:31:43.632087 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ae1fcc30-65e0-4f9d-9ffa-bd87c1effd02-ovncontroller-config-0" (OuterVolumeSpecName: "ovncontroller-config-0") pod "ae1fcc30-65e0-4f9d-9ffa-bd87c1effd02" (UID: "ae1fcc30-65e0-4f9d-9ffa-bd87c1effd02"). InnerVolumeSpecName "ovncontroller-config-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 10:31:43 crc kubenswrapper[4730]: I0930 10:31:43.683307 4730 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae1fcc30-65e0-4f9d-9ffa-bd87c1effd02-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 10:31:43 crc kubenswrapper[4730]: I0930 10:31:43.683364 4730 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/ae1fcc30-65e0-4f9d-9ffa-bd87c1effd02-ceph\") on node \"crc\" DevicePath \"\"" Sep 30 10:31:43 crc kubenswrapper[4730]: I0930 10:31:43.683376 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mgpmn\" (UniqueName: \"kubernetes.io/projected/ae1fcc30-65e0-4f9d-9ffa-bd87c1effd02-kube-api-access-mgpmn\") on node \"crc\" DevicePath \"\"" Sep 30 10:31:43 crc kubenswrapper[4730]: I0930 10:31:43.683387 4730 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ae1fcc30-65e0-4f9d-9ffa-bd87c1effd02-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 10:31:43 crc kubenswrapper[4730]: I0930 10:31:43.683401 4730 reconciler_common.go:293] "Volume detached for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/ae1fcc30-65e0-4f9d-9ffa-bd87c1effd02-ovncontroller-config-0\") on node \"crc\" DevicePath \"\"" Sep 30 10:31:43 crc kubenswrapper[4730]: I0930 10:31:43.683412 4730 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ae1fcc30-65e0-4f9d-9ffa-bd87c1effd02-inventory\") on node \"crc\" DevicePath \"\"" Sep 30 10:31:44 crc kubenswrapper[4730]: I0930 10:31:44.037704 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-br6q6" event={"ID":"ae1fcc30-65e0-4f9d-9ffa-bd87c1effd02","Type":"ContainerDied","Data":"224b4fd9a282c31db9f3365177049eadbba6b8b25cf7ee4dbb27af15ea011b73"} Sep 30 10:31:44 crc kubenswrapper[4730]: I0930 10:31:44.037743 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-br6q6" Sep 30 10:31:44 crc kubenswrapper[4730]: I0930 10:31:44.037774 4730 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="224b4fd9a282c31db9f3365177049eadbba6b8b25cf7ee4dbb27af15ea011b73" Sep 30 10:31:44 crc kubenswrapper[4730]: I0930 10:31:44.191731 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-xtwz8"] Sep 30 10:31:44 crc kubenswrapper[4730]: E0930 10:31:44.192113 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="72ba1944-be78-4291-bf77-2ebb5c70f7a1" containerName="registry-server" Sep 30 10:31:44 crc kubenswrapper[4730]: I0930 10:31:44.192133 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="72ba1944-be78-4291-bf77-2ebb5c70f7a1" containerName="registry-server" Sep 30 10:31:44 crc kubenswrapper[4730]: E0930 10:31:44.192166 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="72ba1944-be78-4291-bf77-2ebb5c70f7a1" containerName="extract-content" Sep 30 10:31:44 crc kubenswrapper[4730]: I0930 10:31:44.192175 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="72ba1944-be78-4291-bf77-2ebb5c70f7a1" containerName="extract-content" Sep 30 10:31:44 crc kubenswrapper[4730]: E0930 10:31:44.192198 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="72ba1944-be78-4291-bf77-2ebb5c70f7a1" containerName="extract-utilities" Sep 30 10:31:44 crc kubenswrapper[4730]: I0930 10:31:44.192204 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="72ba1944-be78-4291-bf77-2ebb5c70f7a1" containerName="extract-utilities" Sep 30 10:31:44 crc kubenswrapper[4730]: E0930 10:31:44.192213 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ae1fcc30-65e0-4f9d-9ffa-bd87c1effd02" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Sep 30 10:31:44 crc kubenswrapper[4730]: I0930 10:31:44.192219 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="ae1fcc30-65e0-4f9d-9ffa-bd87c1effd02" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Sep 30 10:31:44 crc kubenswrapper[4730]: I0930 10:31:44.192392 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="ae1fcc30-65e0-4f9d-9ffa-bd87c1effd02" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Sep 30 10:31:44 crc kubenswrapper[4730]: I0930 10:31:44.192407 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="72ba1944-be78-4291-bf77-2ebb5c70f7a1" containerName="registry-server" Sep 30 10:31:44 crc kubenswrapper[4730]: I0930 10:31:44.193096 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-xtwz8" Sep 30 10:31:44 crc kubenswrapper[4730]: I0930 10:31:44.198100 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 30 10:31:44 crc kubenswrapper[4730]: I0930 10:31:44.198159 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-8vdlt" Sep 30 10:31:44 crc kubenswrapper[4730]: I0930 10:31:44.198241 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-ovn-metadata-agent-neutron-config" Sep 30 10:31:44 crc kubenswrapper[4730]: I0930 10:31:44.198345 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 10:31:44 crc kubenswrapper[4730]: I0930 10:31:44.198425 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 30 10:31:44 crc kubenswrapper[4730]: I0930 10:31:44.198556 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Sep 30 10:31:44 crc kubenswrapper[4730]: I0930 10:31:44.198785 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-neutron-config" Sep 30 10:31:44 crc kubenswrapper[4730]: I0930 10:31:44.208566 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-xtwz8"] Sep 30 10:31:44 crc kubenswrapper[4730]: I0930 10:31:44.293571 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/25b53534-c380-44dd-aa82-22606c2a5d22-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-xtwz8\" (UID: \"25b53534-c380-44dd-aa82-22606c2a5d22\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-xtwz8" Sep 30 10:31:44 crc kubenswrapper[4730]: I0930 10:31:44.293704 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/25b53534-c380-44dd-aa82-22606c2a5d22-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-xtwz8\" (UID: \"25b53534-c380-44dd-aa82-22606c2a5d22\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-xtwz8" Sep 30 10:31:44 crc kubenswrapper[4730]: I0930 10:31:44.293738 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k45sv\" (UniqueName: \"kubernetes.io/projected/25b53534-c380-44dd-aa82-22606c2a5d22-kube-api-access-k45sv\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-xtwz8\" (UID: \"25b53534-c380-44dd-aa82-22606c2a5d22\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-xtwz8" Sep 30 10:31:44 crc kubenswrapper[4730]: I0930 10:31:44.293787 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/25b53534-c380-44dd-aa82-22606c2a5d22-ceph\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-xtwz8\" (UID: \"25b53534-c380-44dd-aa82-22606c2a5d22\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-xtwz8" Sep 30 10:31:44 crc kubenswrapper[4730]: I0930 10:31:44.293855 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/25b53534-c380-44dd-aa82-22606c2a5d22-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-xtwz8\" (UID: \"25b53534-c380-44dd-aa82-22606c2a5d22\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-xtwz8" Sep 30 10:31:44 crc kubenswrapper[4730]: I0930 10:31:44.293899 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/25b53534-c380-44dd-aa82-22606c2a5d22-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-xtwz8\" (UID: \"25b53534-c380-44dd-aa82-22606c2a5d22\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-xtwz8" Sep 30 10:31:44 crc kubenswrapper[4730]: I0930 10:31:44.293993 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/25b53534-c380-44dd-aa82-22606c2a5d22-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-xtwz8\" (UID: \"25b53534-c380-44dd-aa82-22606c2a5d22\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-xtwz8" Sep 30 10:31:44 crc kubenswrapper[4730]: I0930 10:31:44.395550 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/25b53534-c380-44dd-aa82-22606c2a5d22-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-xtwz8\" (UID: \"25b53534-c380-44dd-aa82-22606c2a5d22\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-xtwz8" Sep 30 10:31:44 crc kubenswrapper[4730]: I0930 10:31:44.395667 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/25b53534-c380-44dd-aa82-22606c2a5d22-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-xtwz8\" (UID: \"25b53534-c380-44dd-aa82-22606c2a5d22\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-xtwz8" Sep 30 10:31:44 crc kubenswrapper[4730]: I0930 10:31:44.395709 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/25b53534-c380-44dd-aa82-22606c2a5d22-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-xtwz8\" (UID: \"25b53534-c380-44dd-aa82-22606c2a5d22\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-xtwz8" Sep 30 10:31:44 crc kubenswrapper[4730]: I0930 10:31:44.395729 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k45sv\" (UniqueName: \"kubernetes.io/projected/25b53534-c380-44dd-aa82-22606c2a5d22-kube-api-access-k45sv\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-xtwz8\" (UID: \"25b53534-c380-44dd-aa82-22606c2a5d22\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-xtwz8" Sep 30 10:31:44 crc kubenswrapper[4730]: I0930 10:31:44.395792 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/25b53534-c380-44dd-aa82-22606c2a5d22-ceph\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-xtwz8\" (UID: 
\"25b53534-c380-44dd-aa82-22606c2a5d22\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-xtwz8" Sep 30 10:31:44 crc kubenswrapper[4730]: I0930 10:31:44.395851 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/25b53534-c380-44dd-aa82-22606c2a5d22-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-xtwz8\" (UID: \"25b53534-c380-44dd-aa82-22606c2a5d22\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-xtwz8" Sep 30 10:31:44 crc kubenswrapper[4730]: I0930 10:31:44.395901 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/25b53534-c380-44dd-aa82-22606c2a5d22-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-xtwz8\" (UID: \"25b53534-c380-44dd-aa82-22606c2a5d22\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-xtwz8" Sep 30 10:31:44 crc kubenswrapper[4730]: I0930 10:31:44.400623 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/25b53534-c380-44dd-aa82-22606c2a5d22-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-xtwz8\" (UID: \"25b53534-c380-44dd-aa82-22606c2a5d22\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-xtwz8" Sep 30 10:31:44 crc kubenswrapper[4730]: I0930 10:31:44.400685 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/25b53534-c380-44dd-aa82-22606c2a5d22-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-xtwz8\" (UID: \"25b53534-c380-44dd-aa82-22606c2a5d22\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-xtwz8" Sep 30 10:31:44 crc kubenswrapper[4730]: I0930 10:31:44.400693 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/25b53534-c380-44dd-aa82-22606c2a5d22-ceph\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-xtwz8\" (UID: \"25b53534-c380-44dd-aa82-22606c2a5d22\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-xtwz8" Sep 30 10:31:44 crc kubenswrapper[4730]: I0930 10:31:44.401204 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/25b53534-c380-44dd-aa82-22606c2a5d22-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-xtwz8\" (UID: \"25b53534-c380-44dd-aa82-22606c2a5d22\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-xtwz8" Sep 30 10:31:44 crc kubenswrapper[4730]: I0930 10:31:44.401461 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/25b53534-c380-44dd-aa82-22606c2a5d22-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-xtwz8\" (UID: \"25b53534-c380-44dd-aa82-22606c2a5d22\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-xtwz8" Sep 30 10:31:44 crc kubenswrapper[4730]: I0930 10:31:44.401913 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/25b53534-c380-44dd-aa82-22606c2a5d22-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-xtwz8\" (UID: \"25b53534-c380-44dd-aa82-22606c2a5d22\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-xtwz8" Sep 30 10:31:44 crc kubenswrapper[4730]: I0930 10:31:44.411447 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k45sv\" (UniqueName: \"kubernetes.io/projected/25b53534-c380-44dd-aa82-22606c2a5d22-kube-api-access-k45sv\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-xtwz8\" (UID: \"25b53534-c380-44dd-aa82-22606c2a5d22\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-xtwz8" Sep 30 10:31:44 crc kubenswrapper[4730]: I0930 10:31:44.509134 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-xtwz8" Sep 30 10:31:45 crc kubenswrapper[4730]: I0930 10:31:45.090175 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-xtwz8"] Sep 30 10:31:46 crc kubenswrapper[4730]: I0930 10:31:46.063260 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-xtwz8" event={"ID":"25b53534-c380-44dd-aa82-22606c2a5d22","Type":"ContainerStarted","Data":"23f0a9258709fcf64790faacfdc74ff6cf9be597785384cbfe405cf0215f145c"} Sep 30 10:31:47 crc kubenswrapper[4730]: I0930 10:31:47.074328 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-xtwz8" event={"ID":"25b53534-c380-44dd-aa82-22606c2a5d22","Type":"ContainerStarted","Data":"9d3a3f143c2d75dfc58d7c5adaea17d7d03b255af1aaa814ae25c000e71c4c68"} Sep 30 10:31:47 crc kubenswrapper[4730]: I0930 10:31:47.101211 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-xtwz8" podStartSLOduration=2.396704312 podStartE2EDuration="3.101187656s" podCreationTimestamp="2025-09-30 10:31:44 +0000 UTC" firstStartedPulling="2025-09-30 10:31:45.116933683 +0000 UTC m=+2549.450193676" lastFinishedPulling="2025-09-30 10:31:45.821417017 +0000 UTC m=+2550.154677020" observedRunningTime="2025-09-30 10:31:47.091425062 +0000 UTC m=+2551.424685075" watchObservedRunningTime="2025-09-30 10:31:47.101187656 +0000 UTC m=+2551.434447649" Sep 30 10:32:44 crc kubenswrapper[4730]: I0930 10:32:44.605631 4730 generic.go:334] "Generic (PLEG): container finished" podID="25b53534-c380-44dd-aa82-22606c2a5d22" containerID="9d3a3f143c2d75dfc58d7c5adaea17d7d03b255af1aaa814ae25c000e71c4c68" exitCode=0 Sep 30 10:32:44 crc kubenswrapper[4730]: I0930 10:32:44.605743 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-xtwz8" event={"ID":"25b53534-c380-44dd-aa82-22606c2a5d22","Type":"ContainerDied","Data":"9d3a3f143c2d75dfc58d7c5adaea17d7d03b255af1aaa814ae25c000e71c4c68"} Sep 30 10:32:46 crc kubenswrapper[4730]: I0930 10:32:46.024308 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-xtwz8" Sep 30 10:32:46 crc kubenswrapper[4730]: I0930 10:32:46.215563 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/25b53534-c380-44dd-aa82-22606c2a5d22-nova-metadata-neutron-config-0\") pod \"25b53534-c380-44dd-aa82-22606c2a5d22\" (UID: \"25b53534-c380-44dd-aa82-22606c2a5d22\") " Sep 30 10:32:46 crc kubenswrapper[4730]: I0930 10:32:46.215651 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k45sv\" (UniqueName: \"kubernetes.io/projected/25b53534-c380-44dd-aa82-22606c2a5d22-kube-api-access-k45sv\") pod \"25b53534-c380-44dd-aa82-22606c2a5d22\" (UID: \"25b53534-c380-44dd-aa82-22606c2a5d22\") " Sep 30 10:32:46 crc kubenswrapper[4730]: I0930 10:32:46.215735 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/25b53534-c380-44dd-aa82-22606c2a5d22-neutron-metadata-combined-ca-bundle\") pod \"25b53534-c380-44dd-aa82-22606c2a5d22\" (UID: \"25b53534-c380-44dd-aa82-22606c2a5d22\") " Sep 30 10:32:46 crc kubenswrapper[4730]: I0930 10:32:46.215950 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/25b53534-c380-44dd-aa82-22606c2a5d22-ssh-key\") pod \"25b53534-c380-44dd-aa82-22606c2a5d22\" (UID: \"25b53534-c380-44dd-aa82-22606c2a5d22\") " Sep 30 10:32:46 crc kubenswrapper[4730]: I0930 10:32:46.215979 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/25b53534-c380-44dd-aa82-22606c2a5d22-ceph\") pod \"25b53534-c380-44dd-aa82-22606c2a5d22\" (UID: \"25b53534-c380-44dd-aa82-22606c2a5d22\") " Sep 30 10:32:46 crc kubenswrapper[4730]: I0930 10:32:46.216195 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/25b53534-c380-44dd-aa82-22606c2a5d22-inventory\") pod \"25b53534-c380-44dd-aa82-22606c2a5d22\" (UID: \"25b53534-c380-44dd-aa82-22606c2a5d22\") " Sep 30 10:32:46 crc kubenswrapper[4730]: I0930 10:32:46.216228 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/25b53534-c380-44dd-aa82-22606c2a5d22-neutron-ovn-metadata-agent-neutron-config-0\") pod \"25b53534-c380-44dd-aa82-22606c2a5d22\" (UID: \"25b53534-c380-44dd-aa82-22606c2a5d22\") " Sep 30 10:32:46 crc kubenswrapper[4730]: I0930 10:32:46.222079 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25b53534-c380-44dd-aa82-22606c2a5d22-kube-api-access-k45sv" (OuterVolumeSpecName: "kube-api-access-k45sv") pod "25b53534-c380-44dd-aa82-22606c2a5d22" (UID: "25b53534-c380-44dd-aa82-22606c2a5d22"). InnerVolumeSpecName "kube-api-access-k45sv". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:32:46 crc kubenswrapper[4730]: I0930 10:32:46.227771 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25b53534-c380-44dd-aa82-22606c2a5d22-ceph" (OuterVolumeSpecName: "ceph") pod "25b53534-c380-44dd-aa82-22606c2a5d22" (UID: "25b53534-c380-44dd-aa82-22606c2a5d22"). InnerVolumeSpecName "ceph". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:32:46 crc kubenswrapper[4730]: I0930 10:32:46.227822 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25b53534-c380-44dd-aa82-22606c2a5d22-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "25b53534-c380-44dd-aa82-22606c2a5d22" (UID: "25b53534-c380-44dd-aa82-22606c2a5d22"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:32:46 crc kubenswrapper[4730]: I0930 10:32:46.245180 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25b53534-c380-44dd-aa82-22606c2a5d22-inventory" (OuterVolumeSpecName: "inventory") pod "25b53534-c380-44dd-aa82-22606c2a5d22" (UID: "25b53534-c380-44dd-aa82-22606c2a5d22"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:32:46 crc kubenswrapper[4730]: I0930 10:32:46.245331 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25b53534-c380-44dd-aa82-22606c2a5d22-nova-metadata-neutron-config-0" (OuterVolumeSpecName: "nova-metadata-neutron-config-0") pod "25b53534-c380-44dd-aa82-22606c2a5d22" (UID: "25b53534-c380-44dd-aa82-22606c2a5d22"). InnerVolumeSpecName "nova-metadata-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:32:46 crc kubenswrapper[4730]: I0930 10:32:46.248791 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25b53534-c380-44dd-aa82-22606c2a5d22-neutron-ovn-metadata-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-ovn-metadata-agent-neutron-config-0") pod "25b53534-c380-44dd-aa82-22606c2a5d22" (UID: "25b53534-c380-44dd-aa82-22606c2a5d22"). InnerVolumeSpecName "neutron-ovn-metadata-agent-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:32:46 crc kubenswrapper[4730]: I0930 10:32:46.251914 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25b53534-c380-44dd-aa82-22606c2a5d22-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "25b53534-c380-44dd-aa82-22606c2a5d22" (UID: "25b53534-c380-44dd-aa82-22606c2a5d22"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:32:46 crc kubenswrapper[4730]: I0930 10:32:46.318533 4730 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/25b53534-c380-44dd-aa82-22606c2a5d22-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 10:32:46 crc kubenswrapper[4730]: I0930 10:32:46.318569 4730 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/25b53534-c380-44dd-aa82-22606c2a5d22-ceph\") on node \"crc\" DevicePath \"\"" Sep 30 10:32:46 crc kubenswrapper[4730]: I0930 10:32:46.318580 4730 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/25b53534-c380-44dd-aa82-22606c2a5d22-inventory\") on node \"crc\" DevicePath \"\"" Sep 30 10:32:46 crc kubenswrapper[4730]: I0930 10:32:46.318589 4730 reconciler_common.go:293] "Volume detached for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/25b53534-c380-44dd-aa82-22606c2a5d22-neutron-ovn-metadata-agent-neutron-config-0\") on node \"crc\" DevicePath \"\"" Sep 30 10:32:46 crc kubenswrapper[4730]: I0930 10:32:46.318600 4730 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/25b53534-c380-44dd-aa82-22606c2a5d22-nova-metadata-neutron-config-0\") on node \"crc\" DevicePath \"\"" Sep 30 10:32:46 crc kubenswrapper[4730]: I0930 10:32:46.318623 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k45sv\" (UniqueName: \"kubernetes.io/projected/25b53534-c380-44dd-aa82-22606c2a5d22-kube-api-access-k45sv\") on node \"crc\" DevicePath \"\"" Sep 30 10:32:46 crc kubenswrapper[4730]: I0930 10:32:46.318632 4730 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/25b53534-c380-44dd-aa82-22606c2a5d22-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 10:32:46 crc kubenswrapper[4730]: I0930 10:32:46.623795 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-xtwz8" event={"ID":"25b53534-c380-44dd-aa82-22606c2a5d22","Type":"ContainerDied","Data":"23f0a9258709fcf64790faacfdc74ff6cf9be597785384cbfe405cf0215f145c"} Sep 30 10:32:46 crc kubenswrapper[4730]: I0930 10:32:46.623837 4730 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="23f0a9258709fcf64790faacfdc74ff6cf9be597785384cbfe405cf0215f145c" Sep 30 10:32:46 crc kubenswrapper[4730]: I0930 10:32:46.623859 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-xtwz8" Sep 30 10:32:46 crc kubenswrapper[4730]: I0930 10:32:46.719053 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-qcc5h"] Sep 30 10:32:46 crc kubenswrapper[4730]: E0930 10:32:46.719437 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="25b53534-c380-44dd-aa82-22606c2a5d22" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Sep 30 10:32:46 crc kubenswrapper[4730]: I0930 10:32:46.719455 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="25b53534-c380-44dd-aa82-22606c2a5d22" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Sep 30 10:32:46 crc kubenswrapper[4730]: I0930 10:32:46.719669 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="25b53534-c380-44dd-aa82-22606c2a5d22" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Sep 30 10:32:46 crc kubenswrapper[4730]: I0930 10:32:46.720411 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-qcc5h" Sep 30 10:32:46 crc kubenswrapper[4730]: I0930 10:32:46.723798 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-8vdlt" Sep 30 10:32:46 crc kubenswrapper[4730]: I0930 10:32:46.724100 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 30 10:32:46 crc kubenswrapper[4730]: I0930 10:32:46.724297 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Sep 30 10:32:46 crc kubenswrapper[4730]: I0930 10:32:46.724381 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 10:32:46 crc kubenswrapper[4730]: I0930 10:32:46.724356 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"libvirt-secret" Sep 30 10:32:46 crc kubenswrapper[4730]: I0930 10:32:46.724515 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 30 10:32:46 crc kubenswrapper[4730]: I0930 10:32:46.731070 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-qcc5h"] Sep 30 10:32:46 crc kubenswrapper[4730]: I0930 10:32:46.827464 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ds8nw\" (UniqueName: \"kubernetes.io/projected/114c62cf-b040-491e-90fa-794b4cc29361-kube-api-access-ds8nw\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-qcc5h\" (UID: \"114c62cf-b040-491e-90fa-794b4cc29361\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-qcc5h" Sep 30 10:32:46 crc kubenswrapper[4730]: I0930 10:32:46.829768 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/114c62cf-b040-491e-90fa-794b4cc29361-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-qcc5h\" (UID: \"114c62cf-b040-491e-90fa-794b4cc29361\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-qcc5h" Sep 30 10:32:46 crc kubenswrapper[4730]: I0930 10:32:46.830103 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-secret-0\" (UniqueName: 
\"kubernetes.io/secret/114c62cf-b040-491e-90fa-794b4cc29361-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-qcc5h\" (UID: \"114c62cf-b040-491e-90fa-794b4cc29361\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-qcc5h" Sep 30 10:32:46 crc kubenswrapper[4730]: I0930 10:32:46.830181 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/114c62cf-b040-491e-90fa-794b4cc29361-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-qcc5h\" (UID: \"114c62cf-b040-491e-90fa-794b4cc29361\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-qcc5h" Sep 30 10:32:46 crc kubenswrapper[4730]: I0930 10:32:46.831309 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/114c62cf-b040-491e-90fa-794b4cc29361-ceph\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-qcc5h\" (UID: \"114c62cf-b040-491e-90fa-794b4cc29361\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-qcc5h" Sep 30 10:32:46 crc kubenswrapper[4730]: I0930 10:32:46.831501 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/114c62cf-b040-491e-90fa-794b4cc29361-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-qcc5h\" (UID: \"114c62cf-b040-491e-90fa-794b4cc29361\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-qcc5h" Sep 30 10:32:46 crc kubenswrapper[4730]: I0930 10:32:46.933442 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/114c62cf-b040-491e-90fa-794b4cc29361-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-qcc5h\" (UID: \"114c62cf-b040-491e-90fa-794b4cc29361\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-qcc5h" Sep 30 10:32:46 crc kubenswrapper[4730]: I0930 10:32:46.933771 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/114c62cf-b040-491e-90fa-794b4cc29361-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-qcc5h\" (UID: \"114c62cf-b040-491e-90fa-794b4cc29361\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-qcc5h" Sep 30 10:32:46 crc kubenswrapper[4730]: I0930 10:32:46.933812 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/114c62cf-b040-491e-90fa-794b4cc29361-ceph\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-qcc5h\" (UID: \"114c62cf-b040-491e-90fa-794b4cc29361\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-qcc5h" Sep 30 10:32:46 crc kubenswrapper[4730]: I0930 10:32:46.933832 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/114c62cf-b040-491e-90fa-794b4cc29361-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-qcc5h\" (UID: \"114c62cf-b040-491e-90fa-794b4cc29361\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-qcc5h" Sep 30 10:32:46 crc kubenswrapper[4730]: I0930 10:32:46.933896 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ds8nw\" (UniqueName: 
\"kubernetes.io/projected/114c62cf-b040-491e-90fa-794b4cc29361-kube-api-access-ds8nw\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-qcc5h\" (UID: \"114c62cf-b040-491e-90fa-794b4cc29361\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-qcc5h" Sep 30 10:32:46 crc kubenswrapper[4730]: I0930 10:32:46.933930 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/114c62cf-b040-491e-90fa-794b4cc29361-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-qcc5h\" (UID: \"114c62cf-b040-491e-90fa-794b4cc29361\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-qcc5h" Sep 30 10:32:46 crc kubenswrapper[4730]: I0930 10:32:46.939840 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/114c62cf-b040-491e-90fa-794b4cc29361-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-qcc5h\" (UID: \"114c62cf-b040-491e-90fa-794b4cc29361\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-qcc5h" Sep 30 10:32:46 crc kubenswrapper[4730]: I0930 10:32:46.939874 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/114c62cf-b040-491e-90fa-794b4cc29361-ceph\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-qcc5h\" (UID: \"114c62cf-b040-491e-90fa-794b4cc29361\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-qcc5h" Sep 30 10:32:46 crc kubenswrapper[4730]: I0930 10:32:46.940561 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/114c62cf-b040-491e-90fa-794b4cc29361-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-qcc5h\" (UID: \"114c62cf-b040-491e-90fa-794b4cc29361\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-qcc5h" Sep 30 10:32:46 crc kubenswrapper[4730]: I0930 10:32:46.940570 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/114c62cf-b040-491e-90fa-794b4cc29361-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-qcc5h\" (UID: \"114c62cf-b040-491e-90fa-794b4cc29361\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-qcc5h" Sep 30 10:32:46 crc kubenswrapper[4730]: I0930 10:32:46.946251 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/114c62cf-b040-491e-90fa-794b4cc29361-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-qcc5h\" (UID: \"114c62cf-b040-491e-90fa-794b4cc29361\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-qcc5h" Sep 30 10:32:46 crc kubenswrapper[4730]: I0930 10:32:46.954419 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ds8nw\" (UniqueName: \"kubernetes.io/projected/114c62cf-b040-491e-90fa-794b4cc29361-kube-api-access-ds8nw\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-qcc5h\" (UID: \"114c62cf-b040-491e-90fa-794b4cc29361\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-qcc5h" Sep 30 10:32:47 crc kubenswrapper[4730]: I0930 10:32:47.083485 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-qcc5h" Sep 30 10:32:47 crc kubenswrapper[4730]: I0930 10:32:47.581795 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-qcc5h"] Sep 30 10:32:47 crc kubenswrapper[4730]: I0930 10:32:47.633924 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-qcc5h" event={"ID":"114c62cf-b040-491e-90fa-794b4cc29361","Type":"ContainerStarted","Data":"22fa61813b62c4ee2936b5cc01a0dd2b79b0362894b534b3643a44d2412ce060"} Sep 30 10:32:48 crc kubenswrapper[4730]: I0930 10:32:48.645534 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-qcc5h" event={"ID":"114c62cf-b040-491e-90fa-794b4cc29361","Type":"ContainerStarted","Data":"838cdf913c66c9634ca88fb52904fee56b0055f65240832d6796b2700dd0896f"} Sep 30 10:32:48 crc kubenswrapper[4730]: I0930 10:32:48.669559 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-qcc5h" podStartSLOduration=2.207179694 podStartE2EDuration="2.669535528s" podCreationTimestamp="2025-09-30 10:32:46 +0000 UTC" firstStartedPulling="2025-09-30 10:32:47.583572092 +0000 UTC m=+2611.916832075" lastFinishedPulling="2025-09-30 10:32:48.045927916 +0000 UTC m=+2612.379187909" observedRunningTime="2025-09-30 10:32:48.662085614 +0000 UTC m=+2612.995345617" watchObservedRunningTime="2025-09-30 10:32:48.669535528 +0000 UTC m=+2613.002795521" Sep 30 10:33:32 crc kubenswrapper[4730]: I0930 10:33:32.336592 4730 patch_prober.go:28] interesting pod/machine-config-daemon-d4zf9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 10:33:32 crc kubenswrapper[4730]: I0930 10:33:32.337171 4730 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 10:34:02 crc kubenswrapper[4730]: I0930 10:34:02.336778 4730 patch_prober.go:28] interesting pod/machine-config-daemon-d4zf9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 10:34:02 crc kubenswrapper[4730]: I0930 10:34:02.337306 4730 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 10:34:21 crc kubenswrapper[4730]: I0930 10:34:21.890165 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-qm56b"] Sep 30 10:34:21 crc kubenswrapper[4730]: I0930 10:34:21.892729 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-qm56b" Sep 30 10:34:21 crc kubenswrapper[4730]: I0930 10:34:21.926031 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-qm56b"] Sep 30 10:34:22 crc kubenswrapper[4730]: I0930 10:34:22.012821 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gm9pp\" (UniqueName: \"kubernetes.io/projected/dad5cfca-fcca-4fda-9578-b88478e33aa8-kube-api-access-gm9pp\") pod \"community-operators-qm56b\" (UID: \"dad5cfca-fcca-4fda-9578-b88478e33aa8\") " pod="openshift-marketplace/community-operators-qm56b" Sep 30 10:34:22 crc kubenswrapper[4730]: I0930 10:34:22.013225 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dad5cfca-fcca-4fda-9578-b88478e33aa8-catalog-content\") pod \"community-operators-qm56b\" (UID: \"dad5cfca-fcca-4fda-9578-b88478e33aa8\") " pod="openshift-marketplace/community-operators-qm56b" Sep 30 10:34:22 crc kubenswrapper[4730]: I0930 10:34:22.013372 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dad5cfca-fcca-4fda-9578-b88478e33aa8-utilities\") pod \"community-operators-qm56b\" (UID: \"dad5cfca-fcca-4fda-9578-b88478e33aa8\") " pod="openshift-marketplace/community-operators-qm56b" Sep 30 10:34:22 crc kubenswrapper[4730]: I0930 10:34:22.115365 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dad5cfca-fcca-4fda-9578-b88478e33aa8-catalog-content\") pod \"community-operators-qm56b\" (UID: \"dad5cfca-fcca-4fda-9578-b88478e33aa8\") " pod="openshift-marketplace/community-operators-qm56b" Sep 30 10:34:22 crc kubenswrapper[4730]: I0930 10:34:22.115429 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dad5cfca-fcca-4fda-9578-b88478e33aa8-utilities\") pod \"community-operators-qm56b\" (UID: \"dad5cfca-fcca-4fda-9578-b88478e33aa8\") " pod="openshift-marketplace/community-operators-qm56b" Sep 30 10:34:22 crc kubenswrapper[4730]: I0930 10:34:22.115563 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gm9pp\" (UniqueName: \"kubernetes.io/projected/dad5cfca-fcca-4fda-9578-b88478e33aa8-kube-api-access-gm9pp\") pod \"community-operators-qm56b\" (UID: \"dad5cfca-fcca-4fda-9578-b88478e33aa8\") " pod="openshift-marketplace/community-operators-qm56b" Sep 30 10:34:22 crc kubenswrapper[4730]: I0930 10:34:22.116034 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dad5cfca-fcca-4fda-9578-b88478e33aa8-catalog-content\") pod \"community-operators-qm56b\" (UID: \"dad5cfca-fcca-4fda-9578-b88478e33aa8\") " pod="openshift-marketplace/community-operators-qm56b" Sep 30 10:34:22 crc kubenswrapper[4730]: I0930 10:34:22.116079 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dad5cfca-fcca-4fda-9578-b88478e33aa8-utilities\") pod \"community-operators-qm56b\" (UID: \"dad5cfca-fcca-4fda-9578-b88478e33aa8\") " pod="openshift-marketplace/community-operators-qm56b" Sep 30 10:34:22 crc kubenswrapper[4730]: I0930 10:34:22.133907 4730 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-gm9pp\" (UniqueName: \"kubernetes.io/projected/dad5cfca-fcca-4fda-9578-b88478e33aa8-kube-api-access-gm9pp\") pod \"community-operators-qm56b\" (UID: \"dad5cfca-fcca-4fda-9578-b88478e33aa8\") " pod="openshift-marketplace/community-operators-qm56b" Sep 30 10:34:22 crc kubenswrapper[4730]: I0930 10:34:22.218989 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-qm56b" Sep 30 10:34:22 crc kubenswrapper[4730]: I0930 10:34:22.756532 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-qm56b"] Sep 30 10:34:23 crc kubenswrapper[4730]: I0930 10:34:23.514579 4730 generic.go:334] "Generic (PLEG): container finished" podID="dad5cfca-fcca-4fda-9578-b88478e33aa8" containerID="bc6f94719cbb645d5442ead7eaf4fa0d2a6afd293b32ca1a4752e35e172f723e" exitCode=0 Sep 30 10:34:23 crc kubenswrapper[4730]: I0930 10:34:23.514668 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qm56b" event={"ID":"dad5cfca-fcca-4fda-9578-b88478e33aa8","Type":"ContainerDied","Data":"bc6f94719cbb645d5442ead7eaf4fa0d2a6afd293b32ca1a4752e35e172f723e"} Sep 30 10:34:23 crc kubenswrapper[4730]: I0930 10:34:23.514887 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qm56b" event={"ID":"dad5cfca-fcca-4fda-9578-b88478e33aa8","Type":"ContainerStarted","Data":"f51af58b516af76dce24106b10a0fdeb800886ba05cda14eb0573758e9000361"} Sep 30 10:34:23 crc kubenswrapper[4730]: I0930 10:34:23.516839 4730 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 30 10:34:24 crc kubenswrapper[4730]: I0930 10:34:24.526683 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qm56b" event={"ID":"dad5cfca-fcca-4fda-9578-b88478e33aa8","Type":"ContainerStarted","Data":"83c6c488121ddb315d99eecb391fcfd8d4a84120d4abdd892a51306f0c02adac"} Sep 30 10:34:25 crc kubenswrapper[4730]: I0930 10:34:25.537859 4730 generic.go:334] "Generic (PLEG): container finished" podID="dad5cfca-fcca-4fda-9578-b88478e33aa8" containerID="83c6c488121ddb315d99eecb391fcfd8d4a84120d4abdd892a51306f0c02adac" exitCode=0 Sep 30 10:34:25 crc kubenswrapper[4730]: I0930 10:34:25.537909 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qm56b" event={"ID":"dad5cfca-fcca-4fda-9578-b88478e33aa8","Type":"ContainerDied","Data":"83c6c488121ddb315d99eecb391fcfd8d4a84120d4abdd892a51306f0c02adac"} Sep 30 10:34:26 crc kubenswrapper[4730]: I0930 10:34:26.549374 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qm56b" event={"ID":"dad5cfca-fcca-4fda-9578-b88478e33aa8","Type":"ContainerStarted","Data":"a8517fa35fd5acbe42df9481b4882c11df486a486f7055f7e6f2b9ce9e28d58a"} Sep 30 10:34:26 crc kubenswrapper[4730]: I0930 10:34:26.573748 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-qm56b" podStartSLOduration=3.067227475 podStartE2EDuration="5.573730548s" podCreationTimestamp="2025-09-30 10:34:21 +0000 UTC" firstStartedPulling="2025-09-30 10:34:23.516562775 +0000 UTC m=+2707.849822768" lastFinishedPulling="2025-09-30 10:34:26.023065848 +0000 UTC m=+2710.356325841" observedRunningTime="2025-09-30 10:34:26.573583604 +0000 UTC m=+2710.906843597" watchObservedRunningTime="2025-09-30 
10:34:26.573730548 +0000 UTC m=+2710.906990531" Sep 30 10:34:32 crc kubenswrapper[4730]: I0930 10:34:32.220250 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-qm56b" Sep 30 10:34:32 crc kubenswrapper[4730]: I0930 10:34:32.220850 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-qm56b" Sep 30 10:34:32 crc kubenswrapper[4730]: I0930 10:34:32.268853 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-qm56b" Sep 30 10:34:32 crc kubenswrapper[4730]: I0930 10:34:32.336689 4730 patch_prober.go:28] interesting pod/machine-config-daemon-d4zf9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 10:34:32 crc kubenswrapper[4730]: I0930 10:34:32.336739 4730 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 10:34:32 crc kubenswrapper[4730]: I0930 10:34:32.336788 4730 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" Sep 30 10:34:32 crc kubenswrapper[4730]: I0930 10:34:32.337832 4730 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"fca465fdb97ac9d9a109d35ebeee9e761de6e92d75613c1d72ae65f001b89f7b"} pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 10:34:32 crc kubenswrapper[4730]: I0930 10:34:32.337901 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerName="machine-config-daemon" containerID="cri-o://fca465fdb97ac9d9a109d35ebeee9e761de6e92d75613c1d72ae65f001b89f7b" gracePeriod=600 Sep 30 10:34:32 crc kubenswrapper[4730]: I0930 10:34:32.607113 4730 generic.go:334] "Generic (PLEG): container finished" podID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerID="fca465fdb97ac9d9a109d35ebeee9e761de6e92d75613c1d72ae65f001b89f7b" exitCode=0 Sep 30 10:34:32 crc kubenswrapper[4730]: I0930 10:34:32.607192 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" event={"ID":"95bd4436-8399-478d-9552-c9ba5ae8f327","Type":"ContainerDied","Data":"fca465fdb97ac9d9a109d35ebeee9e761de6e92d75613c1d72ae65f001b89f7b"} Sep 30 10:34:32 crc kubenswrapper[4730]: I0930 10:34:32.608159 4730 scope.go:117] "RemoveContainer" containerID="9f6faea1f020203ed696b478f58ffd89607071ce35adf31ee047b8ef2439d347" Sep 30 10:34:32 crc kubenswrapper[4730]: I0930 10:34:32.667416 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-qm56b" Sep 30 10:34:32 crc kubenswrapper[4730]: I0930 10:34:32.725478 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-qm56b"] Sep 30 10:34:33 crc 
kubenswrapper[4730]: I0930 10:34:33.620489 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" event={"ID":"95bd4436-8399-478d-9552-c9ba5ae8f327","Type":"ContainerStarted","Data":"7e6c79d1416bb74cb6515c7fc45c6c769987357541374ab109fdf2b032a93fd0"} Sep 30 10:34:34 crc kubenswrapper[4730]: I0930 10:34:34.628008 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-qm56b" podUID="dad5cfca-fcca-4fda-9578-b88478e33aa8" containerName="registry-server" containerID="cri-o://a8517fa35fd5acbe42df9481b4882c11df486a486f7055f7e6f2b9ce9e28d58a" gracePeriod=2 Sep 30 10:34:35 crc kubenswrapper[4730]: I0930 10:34:35.092668 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-qm56b" Sep 30 10:34:35 crc kubenswrapper[4730]: I0930 10:34:35.193994 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gm9pp\" (UniqueName: \"kubernetes.io/projected/dad5cfca-fcca-4fda-9578-b88478e33aa8-kube-api-access-gm9pp\") pod \"dad5cfca-fcca-4fda-9578-b88478e33aa8\" (UID: \"dad5cfca-fcca-4fda-9578-b88478e33aa8\") " Sep 30 10:34:35 crc kubenswrapper[4730]: I0930 10:34:35.194083 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dad5cfca-fcca-4fda-9578-b88478e33aa8-catalog-content\") pod \"dad5cfca-fcca-4fda-9578-b88478e33aa8\" (UID: \"dad5cfca-fcca-4fda-9578-b88478e33aa8\") " Sep 30 10:34:35 crc kubenswrapper[4730]: I0930 10:34:35.194226 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dad5cfca-fcca-4fda-9578-b88478e33aa8-utilities\") pod \"dad5cfca-fcca-4fda-9578-b88478e33aa8\" (UID: \"dad5cfca-fcca-4fda-9578-b88478e33aa8\") " Sep 30 10:34:35 crc kubenswrapper[4730]: I0930 10:34:35.195250 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dad5cfca-fcca-4fda-9578-b88478e33aa8-utilities" (OuterVolumeSpecName: "utilities") pod "dad5cfca-fcca-4fda-9578-b88478e33aa8" (UID: "dad5cfca-fcca-4fda-9578-b88478e33aa8"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 10:34:35 crc kubenswrapper[4730]: I0930 10:34:35.200875 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dad5cfca-fcca-4fda-9578-b88478e33aa8-kube-api-access-gm9pp" (OuterVolumeSpecName: "kube-api-access-gm9pp") pod "dad5cfca-fcca-4fda-9578-b88478e33aa8" (UID: "dad5cfca-fcca-4fda-9578-b88478e33aa8"). InnerVolumeSpecName "kube-api-access-gm9pp". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:34:35 crc kubenswrapper[4730]: I0930 10:34:35.247391 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dad5cfca-fcca-4fda-9578-b88478e33aa8-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "dad5cfca-fcca-4fda-9578-b88478e33aa8" (UID: "dad5cfca-fcca-4fda-9578-b88478e33aa8"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 10:34:35 crc kubenswrapper[4730]: I0930 10:34:35.296795 4730 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dad5cfca-fcca-4fda-9578-b88478e33aa8-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 10:34:35 crc kubenswrapper[4730]: I0930 10:34:35.296848 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gm9pp\" (UniqueName: \"kubernetes.io/projected/dad5cfca-fcca-4fda-9578-b88478e33aa8-kube-api-access-gm9pp\") on node \"crc\" DevicePath \"\"" Sep 30 10:34:35 crc kubenswrapper[4730]: I0930 10:34:35.296863 4730 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dad5cfca-fcca-4fda-9578-b88478e33aa8-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 10:34:35 crc kubenswrapper[4730]: I0930 10:34:35.637217 4730 generic.go:334] "Generic (PLEG): container finished" podID="dad5cfca-fcca-4fda-9578-b88478e33aa8" containerID="a8517fa35fd5acbe42df9481b4882c11df486a486f7055f7e6f2b9ce9e28d58a" exitCode=0 Sep 30 10:34:35 crc kubenswrapper[4730]: I0930 10:34:35.637473 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qm56b" event={"ID":"dad5cfca-fcca-4fda-9578-b88478e33aa8","Type":"ContainerDied","Data":"a8517fa35fd5acbe42df9481b4882c11df486a486f7055f7e6f2b9ce9e28d58a"} Sep 30 10:34:35 crc kubenswrapper[4730]: I0930 10:34:35.637497 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qm56b" event={"ID":"dad5cfca-fcca-4fda-9578-b88478e33aa8","Type":"ContainerDied","Data":"f51af58b516af76dce24106b10a0fdeb800886ba05cda14eb0573758e9000361"} Sep 30 10:34:35 crc kubenswrapper[4730]: I0930 10:34:35.637514 4730 scope.go:117] "RemoveContainer" containerID="a8517fa35fd5acbe42df9481b4882c11df486a486f7055f7e6f2b9ce9e28d58a" Sep 30 10:34:35 crc kubenswrapper[4730]: I0930 10:34:35.637649 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-qm56b" Sep 30 10:34:35 crc kubenswrapper[4730]: I0930 10:34:35.664393 4730 scope.go:117] "RemoveContainer" containerID="83c6c488121ddb315d99eecb391fcfd8d4a84120d4abdd892a51306f0c02adac" Sep 30 10:34:35 crc kubenswrapper[4730]: I0930 10:34:35.672516 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-qm56b"] Sep 30 10:34:35 crc kubenswrapper[4730]: I0930 10:34:35.704796 4730 scope.go:117] "RemoveContainer" containerID="bc6f94719cbb645d5442ead7eaf4fa0d2a6afd293b32ca1a4752e35e172f723e" Sep 30 10:34:35 crc kubenswrapper[4730]: I0930 10:34:35.705964 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-qm56b"] Sep 30 10:34:35 crc kubenswrapper[4730]: I0930 10:34:35.756633 4730 scope.go:117] "RemoveContainer" containerID="a8517fa35fd5acbe42df9481b4882c11df486a486f7055f7e6f2b9ce9e28d58a" Sep 30 10:34:35 crc kubenswrapper[4730]: E0930 10:34:35.763241 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a8517fa35fd5acbe42df9481b4882c11df486a486f7055f7e6f2b9ce9e28d58a\": container with ID starting with a8517fa35fd5acbe42df9481b4882c11df486a486f7055f7e6f2b9ce9e28d58a not found: ID does not exist" containerID="a8517fa35fd5acbe42df9481b4882c11df486a486f7055f7e6f2b9ce9e28d58a" Sep 30 10:34:35 crc kubenswrapper[4730]: I0930 10:34:35.763318 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a8517fa35fd5acbe42df9481b4882c11df486a486f7055f7e6f2b9ce9e28d58a"} err="failed to get container status \"a8517fa35fd5acbe42df9481b4882c11df486a486f7055f7e6f2b9ce9e28d58a\": rpc error: code = NotFound desc = could not find container \"a8517fa35fd5acbe42df9481b4882c11df486a486f7055f7e6f2b9ce9e28d58a\": container with ID starting with a8517fa35fd5acbe42df9481b4882c11df486a486f7055f7e6f2b9ce9e28d58a not found: ID does not exist" Sep 30 10:34:35 crc kubenswrapper[4730]: I0930 10:34:35.763349 4730 scope.go:117] "RemoveContainer" containerID="83c6c488121ddb315d99eecb391fcfd8d4a84120d4abdd892a51306f0c02adac" Sep 30 10:34:35 crc kubenswrapper[4730]: E0930 10:34:35.767921 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"83c6c488121ddb315d99eecb391fcfd8d4a84120d4abdd892a51306f0c02adac\": container with ID starting with 83c6c488121ddb315d99eecb391fcfd8d4a84120d4abdd892a51306f0c02adac not found: ID does not exist" containerID="83c6c488121ddb315d99eecb391fcfd8d4a84120d4abdd892a51306f0c02adac" Sep 30 10:34:35 crc kubenswrapper[4730]: I0930 10:34:35.767974 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"83c6c488121ddb315d99eecb391fcfd8d4a84120d4abdd892a51306f0c02adac"} err="failed to get container status \"83c6c488121ddb315d99eecb391fcfd8d4a84120d4abdd892a51306f0c02adac\": rpc error: code = NotFound desc = could not find container \"83c6c488121ddb315d99eecb391fcfd8d4a84120d4abdd892a51306f0c02adac\": container with ID starting with 83c6c488121ddb315d99eecb391fcfd8d4a84120d4abdd892a51306f0c02adac not found: ID does not exist" Sep 30 10:34:35 crc kubenswrapper[4730]: I0930 10:34:35.768006 4730 scope.go:117] "RemoveContainer" containerID="bc6f94719cbb645d5442ead7eaf4fa0d2a6afd293b32ca1a4752e35e172f723e" Sep 30 10:34:35 crc kubenswrapper[4730]: E0930 10:34:35.769548 4730 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"bc6f94719cbb645d5442ead7eaf4fa0d2a6afd293b32ca1a4752e35e172f723e\": container with ID starting with bc6f94719cbb645d5442ead7eaf4fa0d2a6afd293b32ca1a4752e35e172f723e not found: ID does not exist" containerID="bc6f94719cbb645d5442ead7eaf4fa0d2a6afd293b32ca1a4752e35e172f723e" Sep 30 10:34:35 crc kubenswrapper[4730]: I0930 10:34:35.769581 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bc6f94719cbb645d5442ead7eaf4fa0d2a6afd293b32ca1a4752e35e172f723e"} err="failed to get container status \"bc6f94719cbb645d5442ead7eaf4fa0d2a6afd293b32ca1a4752e35e172f723e\": rpc error: code = NotFound desc = could not find container \"bc6f94719cbb645d5442ead7eaf4fa0d2a6afd293b32ca1a4752e35e172f723e\": container with ID starting with bc6f94719cbb645d5442ead7eaf4fa0d2a6afd293b32ca1a4752e35e172f723e not found: ID does not exist" Sep 30 10:34:36 crc kubenswrapper[4730]: I0930 10:34:36.394555 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dad5cfca-fcca-4fda-9578-b88478e33aa8" path="/var/lib/kubelet/pods/dad5cfca-fcca-4fda-9578-b88478e33aa8/volumes" Sep 30 10:35:57 crc kubenswrapper[4730]: I0930 10:35:57.080006 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-4llnf"] Sep 30 10:35:57 crc kubenswrapper[4730]: E0930 10:35:57.080951 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dad5cfca-fcca-4fda-9578-b88478e33aa8" containerName="registry-server" Sep 30 10:35:57 crc kubenswrapper[4730]: I0930 10:35:57.080967 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="dad5cfca-fcca-4fda-9578-b88478e33aa8" containerName="registry-server" Sep 30 10:35:57 crc kubenswrapper[4730]: E0930 10:35:57.080976 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dad5cfca-fcca-4fda-9578-b88478e33aa8" containerName="extract-utilities" Sep 30 10:35:57 crc kubenswrapper[4730]: I0930 10:35:57.080982 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="dad5cfca-fcca-4fda-9578-b88478e33aa8" containerName="extract-utilities" Sep 30 10:35:57 crc kubenswrapper[4730]: E0930 10:35:57.080995 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dad5cfca-fcca-4fda-9578-b88478e33aa8" containerName="extract-content" Sep 30 10:35:57 crc kubenswrapper[4730]: I0930 10:35:57.081001 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="dad5cfca-fcca-4fda-9578-b88478e33aa8" containerName="extract-content" Sep 30 10:35:57 crc kubenswrapper[4730]: I0930 10:35:57.081180 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="dad5cfca-fcca-4fda-9578-b88478e33aa8" containerName="registry-server" Sep 30 10:35:57 crc kubenswrapper[4730]: I0930 10:35:57.082472 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-4llnf" Sep 30 10:35:57 crc kubenswrapper[4730]: I0930 10:35:57.131455 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-4llnf"] Sep 30 10:35:57 crc kubenswrapper[4730]: I0930 10:35:57.175964 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mfkb7\" (UniqueName: \"kubernetes.io/projected/9afb173b-6e5f-46d7-ba31-20c0b5fe7eea-kube-api-access-mfkb7\") pod \"certified-operators-4llnf\" (UID: \"9afb173b-6e5f-46d7-ba31-20c0b5fe7eea\") " pod="openshift-marketplace/certified-operators-4llnf" Sep 30 10:35:57 crc kubenswrapper[4730]: I0930 10:35:57.176022 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9afb173b-6e5f-46d7-ba31-20c0b5fe7eea-utilities\") pod \"certified-operators-4llnf\" (UID: \"9afb173b-6e5f-46d7-ba31-20c0b5fe7eea\") " pod="openshift-marketplace/certified-operators-4llnf" Sep 30 10:35:57 crc kubenswrapper[4730]: I0930 10:35:57.176127 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9afb173b-6e5f-46d7-ba31-20c0b5fe7eea-catalog-content\") pod \"certified-operators-4llnf\" (UID: \"9afb173b-6e5f-46d7-ba31-20c0b5fe7eea\") " pod="openshift-marketplace/certified-operators-4llnf" Sep 30 10:35:57 crc kubenswrapper[4730]: I0930 10:35:57.277529 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mfkb7\" (UniqueName: \"kubernetes.io/projected/9afb173b-6e5f-46d7-ba31-20c0b5fe7eea-kube-api-access-mfkb7\") pod \"certified-operators-4llnf\" (UID: \"9afb173b-6e5f-46d7-ba31-20c0b5fe7eea\") " pod="openshift-marketplace/certified-operators-4llnf" Sep 30 10:35:57 crc kubenswrapper[4730]: I0930 10:35:57.277584 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9afb173b-6e5f-46d7-ba31-20c0b5fe7eea-utilities\") pod \"certified-operators-4llnf\" (UID: \"9afb173b-6e5f-46d7-ba31-20c0b5fe7eea\") " pod="openshift-marketplace/certified-operators-4llnf" Sep 30 10:35:57 crc kubenswrapper[4730]: I0930 10:35:57.277739 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9afb173b-6e5f-46d7-ba31-20c0b5fe7eea-catalog-content\") pod \"certified-operators-4llnf\" (UID: \"9afb173b-6e5f-46d7-ba31-20c0b5fe7eea\") " pod="openshift-marketplace/certified-operators-4llnf" Sep 30 10:35:57 crc kubenswrapper[4730]: I0930 10:35:57.278217 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9afb173b-6e5f-46d7-ba31-20c0b5fe7eea-catalog-content\") pod \"certified-operators-4llnf\" (UID: \"9afb173b-6e5f-46d7-ba31-20c0b5fe7eea\") " pod="openshift-marketplace/certified-operators-4llnf" Sep 30 10:35:57 crc kubenswrapper[4730]: I0930 10:35:57.278275 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9afb173b-6e5f-46d7-ba31-20c0b5fe7eea-utilities\") pod \"certified-operators-4llnf\" (UID: \"9afb173b-6e5f-46d7-ba31-20c0b5fe7eea\") " pod="openshift-marketplace/certified-operators-4llnf" Sep 30 10:35:57 crc kubenswrapper[4730]: I0930 10:35:57.296771 4730 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-mfkb7\" (UniqueName: \"kubernetes.io/projected/9afb173b-6e5f-46d7-ba31-20c0b5fe7eea-kube-api-access-mfkb7\") pod \"certified-operators-4llnf\" (UID: \"9afb173b-6e5f-46d7-ba31-20c0b5fe7eea\") " pod="openshift-marketplace/certified-operators-4llnf" Sep 30 10:35:57 crc kubenswrapper[4730]: I0930 10:35:57.452057 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-4llnf" Sep 30 10:35:58 crc kubenswrapper[4730]: I0930 10:35:58.003523 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-4llnf"] Sep 30 10:35:58 crc kubenswrapper[4730]: I0930 10:35:58.381453 4730 generic.go:334] "Generic (PLEG): container finished" podID="9afb173b-6e5f-46d7-ba31-20c0b5fe7eea" containerID="c28207ea82c45cac140d29aca2fb0a2c5e7947dcb14f647f3f9d1865bbe948f9" exitCode=0 Sep 30 10:35:58 crc kubenswrapper[4730]: I0930 10:35:58.391247 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4llnf" event={"ID":"9afb173b-6e5f-46d7-ba31-20c0b5fe7eea","Type":"ContainerDied","Data":"c28207ea82c45cac140d29aca2fb0a2c5e7947dcb14f647f3f9d1865bbe948f9"} Sep 30 10:35:58 crc kubenswrapper[4730]: I0930 10:35:58.391293 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4llnf" event={"ID":"9afb173b-6e5f-46d7-ba31-20c0b5fe7eea","Type":"ContainerStarted","Data":"018e82939ec5680685664378ffefac0fb272b402c26d805df07af661f55c086c"} Sep 30 10:35:59 crc kubenswrapper[4730]: I0930 10:35:59.391388 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4llnf" event={"ID":"9afb173b-6e5f-46d7-ba31-20c0b5fe7eea","Type":"ContainerStarted","Data":"1fa62bf07a6b28b9c27aac619aea68b5c4a0975262e49228c070f9b8b2cf9e44"} Sep 30 10:36:00 crc kubenswrapper[4730]: I0930 10:36:00.401388 4730 generic.go:334] "Generic (PLEG): container finished" podID="9afb173b-6e5f-46d7-ba31-20c0b5fe7eea" containerID="1fa62bf07a6b28b9c27aac619aea68b5c4a0975262e49228c070f9b8b2cf9e44" exitCode=0 Sep 30 10:36:00 crc kubenswrapper[4730]: I0930 10:36:00.401443 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4llnf" event={"ID":"9afb173b-6e5f-46d7-ba31-20c0b5fe7eea","Type":"ContainerDied","Data":"1fa62bf07a6b28b9c27aac619aea68b5c4a0975262e49228c070f9b8b2cf9e44"} Sep 30 10:36:01 crc kubenswrapper[4730]: I0930 10:36:01.427289 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4llnf" event={"ID":"9afb173b-6e5f-46d7-ba31-20c0b5fe7eea","Type":"ContainerStarted","Data":"d4105923776ca86d902f84e711f85f28351b41100fe52389d810ea4251022d74"} Sep 30 10:36:01 crc kubenswrapper[4730]: I0930 10:36:01.456630 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-4llnf" podStartSLOduration=2.046982678 podStartE2EDuration="4.456593883s" podCreationTimestamp="2025-09-30 10:35:57 +0000 UTC" firstStartedPulling="2025-09-30 10:35:58.383890696 +0000 UTC m=+2802.717150689" lastFinishedPulling="2025-09-30 10:36:00.793501901 +0000 UTC m=+2805.126761894" observedRunningTime="2025-09-30 10:36:01.446924591 +0000 UTC m=+2805.780184584" watchObservedRunningTime="2025-09-30 10:36:01.456593883 +0000 UTC m=+2805.789853876" Sep 30 10:36:07 crc kubenswrapper[4730]: I0930 10:36:07.452818 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" 
status="unhealthy" pod="openshift-marketplace/certified-operators-4llnf" Sep 30 10:36:07 crc kubenswrapper[4730]: I0930 10:36:07.453308 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-4llnf" Sep 30 10:36:07 crc kubenswrapper[4730]: I0930 10:36:07.507177 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-4llnf" Sep 30 10:36:07 crc kubenswrapper[4730]: I0930 10:36:07.555884 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-4llnf" Sep 30 10:36:07 crc kubenswrapper[4730]: I0930 10:36:07.738311 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-4llnf"] Sep 30 10:36:09 crc kubenswrapper[4730]: I0930 10:36:09.495653 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-4llnf" podUID="9afb173b-6e5f-46d7-ba31-20c0b5fe7eea" containerName="registry-server" containerID="cri-o://d4105923776ca86d902f84e711f85f28351b41100fe52389d810ea4251022d74" gracePeriod=2 Sep 30 10:36:09 crc kubenswrapper[4730]: I0930 10:36:09.983193 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-4llnf" Sep 30 10:36:10 crc kubenswrapper[4730]: I0930 10:36:10.004403 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mfkb7\" (UniqueName: \"kubernetes.io/projected/9afb173b-6e5f-46d7-ba31-20c0b5fe7eea-kube-api-access-mfkb7\") pod \"9afb173b-6e5f-46d7-ba31-20c0b5fe7eea\" (UID: \"9afb173b-6e5f-46d7-ba31-20c0b5fe7eea\") " Sep 30 10:36:10 crc kubenswrapper[4730]: I0930 10:36:10.011693 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9afb173b-6e5f-46d7-ba31-20c0b5fe7eea-kube-api-access-mfkb7" (OuterVolumeSpecName: "kube-api-access-mfkb7") pod "9afb173b-6e5f-46d7-ba31-20c0b5fe7eea" (UID: "9afb173b-6e5f-46d7-ba31-20c0b5fe7eea"). InnerVolumeSpecName "kube-api-access-mfkb7". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:36:10 crc kubenswrapper[4730]: I0930 10:36:10.105921 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9afb173b-6e5f-46d7-ba31-20c0b5fe7eea-utilities\") pod \"9afb173b-6e5f-46d7-ba31-20c0b5fe7eea\" (UID: \"9afb173b-6e5f-46d7-ba31-20c0b5fe7eea\") " Sep 30 10:36:10 crc kubenswrapper[4730]: I0930 10:36:10.106033 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9afb173b-6e5f-46d7-ba31-20c0b5fe7eea-catalog-content\") pod \"9afb173b-6e5f-46d7-ba31-20c0b5fe7eea\" (UID: \"9afb173b-6e5f-46d7-ba31-20c0b5fe7eea\") " Sep 30 10:36:10 crc kubenswrapper[4730]: I0930 10:36:10.106294 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mfkb7\" (UniqueName: \"kubernetes.io/projected/9afb173b-6e5f-46d7-ba31-20c0b5fe7eea-kube-api-access-mfkb7\") on node \"crc\" DevicePath \"\"" Sep 30 10:36:10 crc kubenswrapper[4730]: I0930 10:36:10.106807 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9afb173b-6e5f-46d7-ba31-20c0b5fe7eea-utilities" (OuterVolumeSpecName: "utilities") pod "9afb173b-6e5f-46d7-ba31-20c0b5fe7eea" (UID: "9afb173b-6e5f-46d7-ba31-20c0b5fe7eea"). 
InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 10:36:10 crc kubenswrapper[4730]: I0930 10:36:10.144079 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9afb173b-6e5f-46d7-ba31-20c0b5fe7eea-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9afb173b-6e5f-46d7-ba31-20c0b5fe7eea" (UID: "9afb173b-6e5f-46d7-ba31-20c0b5fe7eea"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 10:36:10 crc kubenswrapper[4730]: I0930 10:36:10.208157 4730 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9afb173b-6e5f-46d7-ba31-20c0b5fe7eea-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 10:36:10 crc kubenswrapper[4730]: I0930 10:36:10.208194 4730 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9afb173b-6e5f-46d7-ba31-20c0b5fe7eea-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 10:36:10 crc kubenswrapper[4730]: I0930 10:36:10.506862 4730 generic.go:334] "Generic (PLEG): container finished" podID="9afb173b-6e5f-46d7-ba31-20c0b5fe7eea" containerID="d4105923776ca86d902f84e711f85f28351b41100fe52389d810ea4251022d74" exitCode=0 Sep 30 10:36:10 crc kubenswrapper[4730]: I0930 10:36:10.506922 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4llnf" event={"ID":"9afb173b-6e5f-46d7-ba31-20c0b5fe7eea","Type":"ContainerDied","Data":"d4105923776ca86d902f84e711f85f28351b41100fe52389d810ea4251022d74"} Sep 30 10:36:10 crc kubenswrapper[4730]: I0930 10:36:10.506951 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-4llnf" Sep 30 10:36:10 crc kubenswrapper[4730]: I0930 10:36:10.506975 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4llnf" event={"ID":"9afb173b-6e5f-46d7-ba31-20c0b5fe7eea","Type":"ContainerDied","Data":"018e82939ec5680685664378ffefac0fb272b402c26d805df07af661f55c086c"} Sep 30 10:36:10 crc kubenswrapper[4730]: I0930 10:36:10.507001 4730 scope.go:117] "RemoveContainer" containerID="d4105923776ca86d902f84e711f85f28351b41100fe52389d810ea4251022d74" Sep 30 10:36:10 crc kubenswrapper[4730]: I0930 10:36:10.536022 4730 scope.go:117] "RemoveContainer" containerID="1fa62bf07a6b28b9c27aac619aea68b5c4a0975262e49228c070f9b8b2cf9e44" Sep 30 10:36:10 crc kubenswrapper[4730]: I0930 10:36:10.536933 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-4llnf"] Sep 30 10:36:10 crc kubenswrapper[4730]: I0930 10:36:10.545156 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-4llnf"] Sep 30 10:36:10 crc kubenswrapper[4730]: I0930 10:36:10.558891 4730 scope.go:117] "RemoveContainer" containerID="c28207ea82c45cac140d29aca2fb0a2c5e7947dcb14f647f3f9d1865bbe948f9" Sep 30 10:36:10 crc kubenswrapper[4730]: I0930 10:36:10.598202 4730 scope.go:117] "RemoveContainer" containerID="d4105923776ca86d902f84e711f85f28351b41100fe52389d810ea4251022d74" Sep 30 10:36:10 crc kubenswrapper[4730]: E0930 10:36:10.598674 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d4105923776ca86d902f84e711f85f28351b41100fe52389d810ea4251022d74\": container with ID starting with 
d4105923776ca86d902f84e711f85f28351b41100fe52389d810ea4251022d74 not found: ID does not exist" containerID="d4105923776ca86d902f84e711f85f28351b41100fe52389d810ea4251022d74" Sep 30 10:36:10 crc kubenswrapper[4730]: I0930 10:36:10.598722 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d4105923776ca86d902f84e711f85f28351b41100fe52389d810ea4251022d74"} err="failed to get container status \"d4105923776ca86d902f84e711f85f28351b41100fe52389d810ea4251022d74\": rpc error: code = NotFound desc = could not find container \"d4105923776ca86d902f84e711f85f28351b41100fe52389d810ea4251022d74\": container with ID starting with d4105923776ca86d902f84e711f85f28351b41100fe52389d810ea4251022d74 not found: ID does not exist" Sep 30 10:36:10 crc kubenswrapper[4730]: I0930 10:36:10.598758 4730 scope.go:117] "RemoveContainer" containerID="1fa62bf07a6b28b9c27aac619aea68b5c4a0975262e49228c070f9b8b2cf9e44" Sep 30 10:36:10 crc kubenswrapper[4730]: E0930 10:36:10.599059 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1fa62bf07a6b28b9c27aac619aea68b5c4a0975262e49228c070f9b8b2cf9e44\": container with ID starting with 1fa62bf07a6b28b9c27aac619aea68b5c4a0975262e49228c070f9b8b2cf9e44 not found: ID does not exist" containerID="1fa62bf07a6b28b9c27aac619aea68b5c4a0975262e49228c070f9b8b2cf9e44" Sep 30 10:36:10 crc kubenswrapper[4730]: I0930 10:36:10.599105 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1fa62bf07a6b28b9c27aac619aea68b5c4a0975262e49228c070f9b8b2cf9e44"} err="failed to get container status \"1fa62bf07a6b28b9c27aac619aea68b5c4a0975262e49228c070f9b8b2cf9e44\": rpc error: code = NotFound desc = could not find container \"1fa62bf07a6b28b9c27aac619aea68b5c4a0975262e49228c070f9b8b2cf9e44\": container with ID starting with 1fa62bf07a6b28b9c27aac619aea68b5c4a0975262e49228c070f9b8b2cf9e44 not found: ID does not exist" Sep 30 10:36:10 crc kubenswrapper[4730]: I0930 10:36:10.599143 4730 scope.go:117] "RemoveContainer" containerID="c28207ea82c45cac140d29aca2fb0a2c5e7947dcb14f647f3f9d1865bbe948f9" Sep 30 10:36:10 crc kubenswrapper[4730]: E0930 10:36:10.599421 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c28207ea82c45cac140d29aca2fb0a2c5e7947dcb14f647f3f9d1865bbe948f9\": container with ID starting with c28207ea82c45cac140d29aca2fb0a2c5e7947dcb14f647f3f9d1865bbe948f9 not found: ID does not exist" containerID="c28207ea82c45cac140d29aca2fb0a2c5e7947dcb14f647f3f9d1865bbe948f9" Sep 30 10:36:10 crc kubenswrapper[4730]: I0930 10:36:10.599452 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c28207ea82c45cac140d29aca2fb0a2c5e7947dcb14f647f3f9d1865bbe948f9"} err="failed to get container status \"c28207ea82c45cac140d29aca2fb0a2c5e7947dcb14f647f3f9d1865bbe948f9\": rpc error: code = NotFound desc = could not find container \"c28207ea82c45cac140d29aca2fb0a2c5e7947dcb14f647f3f9d1865bbe948f9\": container with ID starting with c28207ea82c45cac140d29aca2fb0a2c5e7947dcb14f647f3f9d1865bbe948f9 not found: ID does not exist" Sep 30 10:36:12 crc kubenswrapper[4730]: I0930 10:36:12.393790 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9afb173b-6e5f-46d7-ba31-20c0b5fe7eea" path="/var/lib/kubelet/pods/9afb173b-6e5f-46d7-ba31-20c0b5fe7eea/volumes" Sep 30 10:36:22 crc kubenswrapper[4730]: I0930 10:36:22.233800 
4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-hj6fs"] Sep 30 10:36:22 crc kubenswrapper[4730]: E0930 10:36:22.234738 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9afb173b-6e5f-46d7-ba31-20c0b5fe7eea" containerName="extract-utilities" Sep 30 10:36:22 crc kubenswrapper[4730]: I0930 10:36:22.234752 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="9afb173b-6e5f-46d7-ba31-20c0b5fe7eea" containerName="extract-utilities" Sep 30 10:36:22 crc kubenswrapper[4730]: E0930 10:36:22.234778 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9afb173b-6e5f-46d7-ba31-20c0b5fe7eea" containerName="registry-server" Sep 30 10:36:22 crc kubenswrapper[4730]: I0930 10:36:22.234785 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="9afb173b-6e5f-46d7-ba31-20c0b5fe7eea" containerName="registry-server" Sep 30 10:36:22 crc kubenswrapper[4730]: E0930 10:36:22.234799 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9afb173b-6e5f-46d7-ba31-20c0b5fe7eea" containerName="extract-content" Sep 30 10:36:22 crc kubenswrapper[4730]: I0930 10:36:22.234806 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="9afb173b-6e5f-46d7-ba31-20c0b5fe7eea" containerName="extract-content" Sep 30 10:36:22 crc kubenswrapper[4730]: I0930 10:36:22.234994 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="9afb173b-6e5f-46d7-ba31-20c0b5fe7eea" containerName="registry-server" Sep 30 10:36:22 crc kubenswrapper[4730]: I0930 10:36:22.236374 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-hj6fs" Sep 30 10:36:22 crc kubenswrapper[4730]: I0930 10:36:22.243495 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-hj6fs"] Sep 30 10:36:22 crc kubenswrapper[4730]: I0930 10:36:22.246441 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/93d1dcc4-d30e-43fa-a9ab-59bdd60aa1c5-catalog-content\") pod \"redhat-marketplace-hj6fs\" (UID: \"93d1dcc4-d30e-43fa-a9ab-59bdd60aa1c5\") " pod="openshift-marketplace/redhat-marketplace-hj6fs" Sep 30 10:36:22 crc kubenswrapper[4730]: I0930 10:36:22.246520 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/93d1dcc4-d30e-43fa-a9ab-59bdd60aa1c5-utilities\") pod \"redhat-marketplace-hj6fs\" (UID: \"93d1dcc4-d30e-43fa-a9ab-59bdd60aa1c5\") " pod="openshift-marketplace/redhat-marketplace-hj6fs" Sep 30 10:36:22 crc kubenswrapper[4730]: I0930 10:36:22.246743 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-msb5z\" (UniqueName: \"kubernetes.io/projected/93d1dcc4-d30e-43fa-a9ab-59bdd60aa1c5-kube-api-access-msb5z\") pod \"redhat-marketplace-hj6fs\" (UID: \"93d1dcc4-d30e-43fa-a9ab-59bdd60aa1c5\") " pod="openshift-marketplace/redhat-marketplace-hj6fs" Sep 30 10:36:22 crc kubenswrapper[4730]: I0930 10:36:22.348700 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/93d1dcc4-d30e-43fa-a9ab-59bdd60aa1c5-utilities\") pod \"redhat-marketplace-hj6fs\" (UID: \"93d1dcc4-d30e-43fa-a9ab-59bdd60aa1c5\") " pod="openshift-marketplace/redhat-marketplace-hj6fs" Sep 30 10:36:22 crc kubenswrapper[4730]: I0930 
10:36:22.349154 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-msb5z\" (UniqueName: \"kubernetes.io/projected/93d1dcc4-d30e-43fa-a9ab-59bdd60aa1c5-kube-api-access-msb5z\") pod \"redhat-marketplace-hj6fs\" (UID: \"93d1dcc4-d30e-43fa-a9ab-59bdd60aa1c5\") " pod="openshift-marketplace/redhat-marketplace-hj6fs" Sep 30 10:36:22 crc kubenswrapper[4730]: I0930 10:36:22.349200 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/93d1dcc4-d30e-43fa-a9ab-59bdd60aa1c5-catalog-content\") pod \"redhat-marketplace-hj6fs\" (UID: \"93d1dcc4-d30e-43fa-a9ab-59bdd60aa1c5\") " pod="openshift-marketplace/redhat-marketplace-hj6fs" Sep 30 10:36:22 crc kubenswrapper[4730]: I0930 10:36:22.349659 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/93d1dcc4-d30e-43fa-a9ab-59bdd60aa1c5-catalog-content\") pod \"redhat-marketplace-hj6fs\" (UID: \"93d1dcc4-d30e-43fa-a9ab-59bdd60aa1c5\") " pod="openshift-marketplace/redhat-marketplace-hj6fs" Sep 30 10:36:22 crc kubenswrapper[4730]: I0930 10:36:22.349747 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/93d1dcc4-d30e-43fa-a9ab-59bdd60aa1c5-utilities\") pod \"redhat-marketplace-hj6fs\" (UID: \"93d1dcc4-d30e-43fa-a9ab-59bdd60aa1c5\") " pod="openshift-marketplace/redhat-marketplace-hj6fs" Sep 30 10:36:22 crc kubenswrapper[4730]: I0930 10:36:22.383816 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-msb5z\" (UniqueName: \"kubernetes.io/projected/93d1dcc4-d30e-43fa-a9ab-59bdd60aa1c5-kube-api-access-msb5z\") pod \"redhat-marketplace-hj6fs\" (UID: \"93d1dcc4-d30e-43fa-a9ab-59bdd60aa1c5\") " pod="openshift-marketplace/redhat-marketplace-hj6fs" Sep 30 10:36:22 crc kubenswrapper[4730]: I0930 10:36:22.602192 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-hj6fs"
Sep 30 10:36:23 crc kubenswrapper[4730]: I0930 10:36:23.072959 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-hj6fs"]
Sep 30 10:36:23 crc kubenswrapper[4730]: I0930 10:36:23.614306 4730 generic.go:334] "Generic (PLEG): container finished" podID="93d1dcc4-d30e-43fa-a9ab-59bdd60aa1c5" containerID="3c1f4658b8417175074fb6666d2001f879029b683dd4107a9c9651fbd924ff0f" exitCode=0
Sep 30 10:36:23 crc kubenswrapper[4730]: I0930 10:36:23.614360 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hj6fs" event={"ID":"93d1dcc4-d30e-43fa-a9ab-59bdd60aa1c5","Type":"ContainerDied","Data":"3c1f4658b8417175074fb6666d2001f879029b683dd4107a9c9651fbd924ff0f"}
Sep 30 10:36:23 crc kubenswrapper[4730]: I0930 10:36:23.614648 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hj6fs" event={"ID":"93d1dcc4-d30e-43fa-a9ab-59bdd60aa1c5","Type":"ContainerStarted","Data":"53b65b4dd48e7803b848606fea1471e04d668ae6b9ea31278b484d74e1a91c05"}
Sep 30 10:36:24 crc kubenswrapper[4730]: I0930 10:36:24.625465 4730 generic.go:334] "Generic (PLEG): container finished" podID="93d1dcc4-d30e-43fa-a9ab-59bdd60aa1c5" containerID="681428982e43a540f287f5df800eef6725a4a6021f487825cd021e1c76055290" exitCode=0
Sep 30 10:36:24 crc kubenswrapper[4730]: I0930 10:36:24.625542 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hj6fs" event={"ID":"93d1dcc4-d30e-43fa-a9ab-59bdd60aa1c5","Type":"ContainerDied","Data":"681428982e43a540f287f5df800eef6725a4a6021f487825cd021e1c76055290"}
Sep 30 10:36:25 crc kubenswrapper[4730]: I0930 10:36:25.637642 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hj6fs" event={"ID":"93d1dcc4-d30e-43fa-a9ab-59bdd60aa1c5","Type":"ContainerStarted","Data":"c8ec3288b7272239a84cd18233dc58302b0fdc0933b9c1b17731ed143c21bea6"}
Sep 30 10:36:25 crc kubenswrapper[4730]: I0930 10:36:25.660510 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-hj6fs" podStartSLOduration=2.046018726 podStartE2EDuration="3.660494224s" podCreationTimestamp="2025-09-30 10:36:22 +0000 UTC" firstStartedPulling="2025-09-30 10:36:23.616429436 +0000 UTC m=+2827.949689429" lastFinishedPulling="2025-09-30 10:36:25.230904914 +0000 UTC m=+2829.564164927" observedRunningTime="2025-09-30 10:36:25.659095517 +0000 UTC m=+2829.992355520" watchObservedRunningTime="2025-09-30 10:36:25.660494224 +0000 UTC m=+2829.993754207"
Sep 30 10:36:32 crc kubenswrapper[4730]: I0930 10:36:32.336944 4730 patch_prober.go:28] interesting pod/machine-config-daemon-d4zf9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 30 10:36:32 crc kubenswrapper[4730]: I0930 10:36:32.337405 4730 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 30 10:36:32 crc kubenswrapper[4730]: I0930 10:36:32.603271 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-hj6fs"
Sep 30 10:36:32 crc kubenswrapper[4730]: I0930 10:36:32.603564 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-hj6fs"
Sep 30 10:36:32 crc kubenswrapper[4730]: I0930 10:36:32.683459 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-hj6fs"
Sep 30 10:36:32 crc kubenswrapper[4730]: I0930 10:36:32.801944 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-hj6fs"
Sep 30 10:36:32 crc kubenswrapper[4730]: I0930 10:36:32.924092 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-hj6fs"]
Sep 30 10:36:34 crc kubenswrapper[4730]: I0930 10:36:34.758559 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-hj6fs" podUID="93d1dcc4-d30e-43fa-a9ab-59bdd60aa1c5" containerName="registry-server" containerID="cri-o://c8ec3288b7272239a84cd18233dc58302b0fdc0933b9c1b17731ed143c21bea6" gracePeriod=2
Sep 30 10:36:35 crc kubenswrapper[4730]: I0930 10:36:35.240124 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-hj6fs"
Sep 30 10:36:35 crc kubenswrapper[4730]: I0930 10:36:35.393047 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/93d1dcc4-d30e-43fa-a9ab-59bdd60aa1c5-utilities\") pod \"93d1dcc4-d30e-43fa-a9ab-59bdd60aa1c5\" (UID: \"93d1dcc4-d30e-43fa-a9ab-59bdd60aa1c5\") "
Sep 30 10:36:35 crc kubenswrapper[4730]: I0930 10:36:35.393113 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/93d1dcc4-d30e-43fa-a9ab-59bdd60aa1c5-catalog-content\") pod \"93d1dcc4-d30e-43fa-a9ab-59bdd60aa1c5\" (UID: \"93d1dcc4-d30e-43fa-a9ab-59bdd60aa1c5\") "
Sep 30 10:36:35 crc kubenswrapper[4730]: I0930 10:36:35.393328 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-msb5z\" (UniqueName: \"kubernetes.io/projected/93d1dcc4-d30e-43fa-a9ab-59bdd60aa1c5-kube-api-access-msb5z\") pod \"93d1dcc4-d30e-43fa-a9ab-59bdd60aa1c5\" (UID: \"93d1dcc4-d30e-43fa-a9ab-59bdd60aa1c5\") "
Sep 30 10:36:35 crc kubenswrapper[4730]: I0930 10:36:35.393990 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/93d1dcc4-d30e-43fa-a9ab-59bdd60aa1c5-utilities" (OuterVolumeSpecName: "utilities") pod "93d1dcc4-d30e-43fa-a9ab-59bdd60aa1c5" (UID: "93d1dcc4-d30e-43fa-a9ab-59bdd60aa1c5"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 10:36:35 crc kubenswrapper[4730]: I0930 10:36:35.407004 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/93d1dcc4-d30e-43fa-a9ab-59bdd60aa1c5-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "93d1dcc4-d30e-43fa-a9ab-59bdd60aa1c5" (UID: "93d1dcc4-d30e-43fa-a9ab-59bdd60aa1c5"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 10:36:35 crc kubenswrapper[4730]: I0930 10:36:35.407834 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/93d1dcc4-d30e-43fa-a9ab-59bdd60aa1c5-kube-api-access-msb5z" (OuterVolumeSpecName: "kube-api-access-msb5z") pod "93d1dcc4-d30e-43fa-a9ab-59bdd60aa1c5" (UID: "93d1dcc4-d30e-43fa-a9ab-59bdd60aa1c5"). InnerVolumeSpecName "kube-api-access-msb5z". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 10:36:35 crc kubenswrapper[4730]: I0930 10:36:35.495892 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-msb5z\" (UniqueName: \"kubernetes.io/projected/93d1dcc4-d30e-43fa-a9ab-59bdd60aa1c5-kube-api-access-msb5z\") on node \"crc\" DevicePath \"\""
Sep 30 10:36:35 crc kubenswrapper[4730]: I0930 10:36:35.495932 4730 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/93d1dcc4-d30e-43fa-a9ab-59bdd60aa1c5-utilities\") on node \"crc\" DevicePath \"\""
Sep 30 10:36:35 crc kubenswrapper[4730]: I0930 10:36:35.495943 4730 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/93d1dcc4-d30e-43fa-a9ab-59bdd60aa1c5-catalog-content\") on node \"crc\" DevicePath \"\""
Sep 30 10:36:35 crc kubenswrapper[4730]: I0930 10:36:35.768257 4730 generic.go:334] "Generic (PLEG): container finished" podID="93d1dcc4-d30e-43fa-a9ab-59bdd60aa1c5" containerID="c8ec3288b7272239a84cd18233dc58302b0fdc0933b9c1b17731ed143c21bea6" exitCode=0
Sep 30 10:36:35 crc kubenswrapper[4730]: I0930 10:36:35.768302 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hj6fs" event={"ID":"93d1dcc4-d30e-43fa-a9ab-59bdd60aa1c5","Type":"ContainerDied","Data":"c8ec3288b7272239a84cd18233dc58302b0fdc0933b9c1b17731ed143c21bea6"}
Sep 30 10:36:35 crc kubenswrapper[4730]: I0930 10:36:35.768328 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hj6fs" event={"ID":"93d1dcc4-d30e-43fa-a9ab-59bdd60aa1c5","Type":"ContainerDied","Data":"53b65b4dd48e7803b848606fea1471e04d668ae6b9ea31278b484d74e1a91c05"}
Sep 30 10:36:35 crc kubenswrapper[4730]: I0930 10:36:35.768351 4730 scope.go:117] "RemoveContainer" containerID="c8ec3288b7272239a84cd18233dc58302b0fdc0933b9c1b17731ed143c21bea6"
Sep 30 10:36:35 crc kubenswrapper[4730]: I0930 10:36:35.769520 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-hj6fs"
Sep 30 10:36:35 crc kubenswrapper[4730]: I0930 10:36:35.797780 4730 scope.go:117] "RemoveContainer" containerID="681428982e43a540f287f5df800eef6725a4a6021f487825cd021e1c76055290"
Sep 30 10:36:35 crc kubenswrapper[4730]: I0930 10:36:35.802847 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-hj6fs"]
Sep 30 10:36:35 crc kubenswrapper[4730]: I0930 10:36:35.810379 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-hj6fs"]
Sep 30 10:36:35 crc kubenswrapper[4730]: I0930 10:36:35.836229 4730 scope.go:117] "RemoveContainer" containerID="3c1f4658b8417175074fb6666d2001f879029b683dd4107a9c9651fbd924ff0f"
Sep 30 10:36:35 crc kubenswrapper[4730]: I0930 10:36:35.867998 4730 scope.go:117] "RemoveContainer" containerID="c8ec3288b7272239a84cd18233dc58302b0fdc0933b9c1b17731ed143c21bea6"
Sep 30 10:36:35 crc kubenswrapper[4730]: E0930 10:36:35.868996 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c8ec3288b7272239a84cd18233dc58302b0fdc0933b9c1b17731ed143c21bea6\": container with ID starting with c8ec3288b7272239a84cd18233dc58302b0fdc0933b9c1b17731ed143c21bea6 not found: ID does not exist" containerID="c8ec3288b7272239a84cd18233dc58302b0fdc0933b9c1b17731ed143c21bea6"
Sep 30 10:36:35 crc kubenswrapper[4730]: I0930 10:36:35.869189 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c8ec3288b7272239a84cd18233dc58302b0fdc0933b9c1b17731ed143c21bea6"} err="failed to get container status \"c8ec3288b7272239a84cd18233dc58302b0fdc0933b9c1b17731ed143c21bea6\": rpc error: code = NotFound desc = could not find container \"c8ec3288b7272239a84cd18233dc58302b0fdc0933b9c1b17731ed143c21bea6\": container with ID starting with c8ec3288b7272239a84cd18233dc58302b0fdc0933b9c1b17731ed143c21bea6 not found: ID does not exist"
Sep 30 10:36:35 crc kubenswrapper[4730]: I0930 10:36:35.869270 4730 scope.go:117] "RemoveContainer" containerID="681428982e43a540f287f5df800eef6725a4a6021f487825cd021e1c76055290"
Sep 30 10:36:35 crc kubenswrapper[4730]: E0930 10:36:35.869741 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"681428982e43a540f287f5df800eef6725a4a6021f487825cd021e1c76055290\": container with ID starting with 681428982e43a540f287f5df800eef6725a4a6021f487825cd021e1c76055290 not found: ID does not exist" containerID="681428982e43a540f287f5df800eef6725a4a6021f487825cd021e1c76055290"
Sep 30 10:36:35 crc kubenswrapper[4730]: I0930 10:36:35.869771 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"681428982e43a540f287f5df800eef6725a4a6021f487825cd021e1c76055290"} err="failed to get container status \"681428982e43a540f287f5df800eef6725a4a6021f487825cd021e1c76055290\": rpc error: code = NotFound desc = could not find container \"681428982e43a540f287f5df800eef6725a4a6021f487825cd021e1c76055290\": container with ID starting with 681428982e43a540f287f5df800eef6725a4a6021f487825cd021e1c76055290 not found: ID does not exist"
Sep 30 10:36:35 crc kubenswrapper[4730]: I0930 10:36:35.869794 4730 scope.go:117] "RemoveContainer" containerID="3c1f4658b8417175074fb6666d2001f879029b683dd4107a9c9651fbd924ff0f"
Sep 30 10:36:35 crc kubenswrapper[4730]: E0930 10:36:35.870001 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3c1f4658b8417175074fb6666d2001f879029b683dd4107a9c9651fbd924ff0f\": container with ID starting with 3c1f4658b8417175074fb6666d2001f879029b683dd4107a9c9651fbd924ff0f not found: ID does not exist" containerID="3c1f4658b8417175074fb6666d2001f879029b683dd4107a9c9651fbd924ff0f"
Sep 30 10:36:35 crc kubenswrapper[4730]: I0930 10:36:35.870086 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3c1f4658b8417175074fb6666d2001f879029b683dd4107a9c9651fbd924ff0f"} err="failed to get container status \"3c1f4658b8417175074fb6666d2001f879029b683dd4107a9c9651fbd924ff0f\": rpc error: code = NotFound desc = could not find container \"3c1f4658b8417175074fb6666d2001f879029b683dd4107a9c9651fbd924ff0f\": container with ID starting with 3c1f4658b8417175074fb6666d2001f879029b683dd4107a9c9651fbd924ff0f not found: ID does not exist"
Sep 30 10:36:36 crc kubenswrapper[4730]: I0930 10:36:36.412386 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="93d1dcc4-d30e-43fa-a9ab-59bdd60aa1c5" path="/var/lib/kubelet/pods/93d1dcc4-d30e-43fa-a9ab-59bdd60aa1c5/volumes"
Sep 30 10:37:02 crc kubenswrapper[4730]: I0930 10:37:02.013459 4730 generic.go:334] "Generic (PLEG): container finished" podID="114c62cf-b040-491e-90fa-794b4cc29361" containerID="838cdf913c66c9634ca88fb52904fee56b0055f65240832d6796b2700dd0896f" exitCode=0
Sep 30 10:37:02 crc kubenswrapper[4730]: I0930 10:37:02.013700 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-qcc5h" event={"ID":"114c62cf-b040-491e-90fa-794b4cc29361","Type":"ContainerDied","Data":"838cdf913c66c9634ca88fb52904fee56b0055f65240832d6796b2700dd0896f"}
Sep 30 10:37:02 crc kubenswrapper[4730]: I0930 10:37:02.336309 4730 patch_prober.go:28] interesting pod/machine-config-daemon-d4zf9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 30 10:37:02 crc kubenswrapper[4730]: I0930 10:37:02.336367 4730 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 30 10:37:03 crc kubenswrapper[4730]: I0930 10:37:03.428694 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-qcc5h"
Sep 30 10:37:03 crc kubenswrapper[4730]: I0930 10:37:03.625531 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/114c62cf-b040-491e-90fa-794b4cc29361-libvirt-combined-ca-bundle\") pod \"114c62cf-b040-491e-90fa-794b4cc29361\" (UID: \"114c62cf-b040-491e-90fa-794b4cc29361\") "
Sep 30 10:37:03 crc kubenswrapper[4730]: I0930 10:37:03.625938 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/114c62cf-b040-491e-90fa-794b4cc29361-libvirt-secret-0\") pod \"114c62cf-b040-491e-90fa-794b4cc29361\" (UID: \"114c62cf-b040-491e-90fa-794b4cc29361\") "
Sep 30 10:37:03 crc kubenswrapper[4730]: I0930 10:37:03.626040 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ds8nw\" (UniqueName: \"kubernetes.io/projected/114c62cf-b040-491e-90fa-794b4cc29361-kube-api-access-ds8nw\") pod \"114c62cf-b040-491e-90fa-794b4cc29361\" (UID: \"114c62cf-b040-491e-90fa-794b4cc29361\") "
Sep 30 10:37:03 crc kubenswrapper[4730]: I0930 10:37:03.626091 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/114c62cf-b040-491e-90fa-794b4cc29361-inventory\") pod \"114c62cf-b040-491e-90fa-794b4cc29361\" (UID: \"114c62cf-b040-491e-90fa-794b4cc29361\") "
Sep 30 10:37:03 crc kubenswrapper[4730]: I0930 10:37:03.626126 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/114c62cf-b040-491e-90fa-794b4cc29361-ssh-key\") pod \"114c62cf-b040-491e-90fa-794b4cc29361\" (UID: \"114c62cf-b040-491e-90fa-794b4cc29361\") "
Sep 30 10:37:03 crc kubenswrapper[4730]: I0930 10:37:03.626204 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/114c62cf-b040-491e-90fa-794b4cc29361-ceph\") pod \"114c62cf-b040-491e-90fa-794b4cc29361\" (UID: \"114c62cf-b040-491e-90fa-794b4cc29361\") "
Sep 30 10:37:03 crc kubenswrapper[4730]: I0930 10:37:03.631967 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/114c62cf-b040-491e-90fa-794b4cc29361-kube-api-access-ds8nw" (OuterVolumeSpecName: "kube-api-access-ds8nw") pod "114c62cf-b040-491e-90fa-794b4cc29361" (UID: "114c62cf-b040-491e-90fa-794b4cc29361"). InnerVolumeSpecName "kube-api-access-ds8nw". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 10:37:03 crc kubenswrapper[4730]: I0930 10:37:03.632363 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/114c62cf-b040-491e-90fa-794b4cc29361-ceph" (OuterVolumeSpecName: "ceph") pod "114c62cf-b040-491e-90fa-794b4cc29361" (UID: "114c62cf-b040-491e-90fa-794b4cc29361"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 10:37:03 crc kubenswrapper[4730]: I0930 10:37:03.632462 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/114c62cf-b040-491e-90fa-794b4cc29361-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "114c62cf-b040-491e-90fa-794b4cc29361" (UID: "114c62cf-b040-491e-90fa-794b4cc29361"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 10:37:03 crc kubenswrapper[4730]: I0930 10:37:03.658817 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/114c62cf-b040-491e-90fa-794b4cc29361-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "114c62cf-b040-491e-90fa-794b4cc29361" (UID: "114c62cf-b040-491e-90fa-794b4cc29361"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 10:37:03 crc kubenswrapper[4730]: I0930 10:37:03.659132 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/114c62cf-b040-491e-90fa-794b4cc29361-libvirt-secret-0" (OuterVolumeSpecName: "libvirt-secret-0") pod "114c62cf-b040-491e-90fa-794b4cc29361" (UID: "114c62cf-b040-491e-90fa-794b4cc29361"). InnerVolumeSpecName "libvirt-secret-0". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 10:37:03 crc kubenswrapper[4730]: I0930 10:37:03.665277 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/114c62cf-b040-491e-90fa-794b4cc29361-inventory" (OuterVolumeSpecName: "inventory") pod "114c62cf-b040-491e-90fa-794b4cc29361" (UID: "114c62cf-b040-491e-90fa-794b4cc29361"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 10:37:03 crc kubenswrapper[4730]: I0930 10:37:03.729111 4730 reconciler_common.go:293] "Volume detached for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/114c62cf-b040-491e-90fa-794b4cc29361-libvirt-secret-0\") on node \"crc\" DevicePath \"\""
Sep 30 10:37:03 crc kubenswrapper[4730]: I0930 10:37:03.729158 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ds8nw\" (UniqueName: \"kubernetes.io/projected/114c62cf-b040-491e-90fa-794b4cc29361-kube-api-access-ds8nw\") on node \"crc\" DevicePath \"\""
Sep 30 10:37:03 crc kubenswrapper[4730]: I0930 10:37:03.729180 4730 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/114c62cf-b040-491e-90fa-794b4cc29361-inventory\") on node \"crc\" DevicePath \"\""
Sep 30 10:37:03 crc kubenswrapper[4730]: I0930 10:37:03.729197 4730 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/114c62cf-b040-491e-90fa-794b4cc29361-ssh-key\") on node \"crc\" DevicePath \"\""
Sep 30 10:37:03 crc kubenswrapper[4730]: I0930 10:37:03.729213 4730 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/114c62cf-b040-491e-90fa-794b4cc29361-ceph\") on node \"crc\" DevicePath \"\""
Sep 30 10:37:03 crc kubenswrapper[4730]: I0930 10:37:03.729228 4730 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/114c62cf-b040-491e-90fa-794b4cc29361-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 30 10:37:04 crc kubenswrapper[4730]: I0930 10:37:04.035487 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-qcc5h" event={"ID":"114c62cf-b040-491e-90fa-794b4cc29361","Type":"ContainerDied","Data":"22fa61813b62c4ee2936b5cc01a0dd2b79b0362894b534b3643a44d2412ce060"}
Sep 30 10:37:04 crc kubenswrapper[4730]: I0930 10:37:04.035533 4730 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="22fa61813b62c4ee2936b5cc01a0dd2b79b0362894b534b3643a44d2412ce060"
Sep 30 10:37:04 crc kubenswrapper[4730]: I0930 10:37:04.035555 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-qcc5h"
Sep 30 10:37:04 crc kubenswrapper[4730]: I0930 10:37:04.183377 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-mkqvk"]
Sep 30 10:37:04 crc kubenswrapper[4730]: E0930 10:37:04.183753 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="114c62cf-b040-491e-90fa-794b4cc29361" containerName="libvirt-edpm-deployment-openstack-edpm-ipam"
Sep 30 10:37:04 crc kubenswrapper[4730]: I0930 10:37:04.183768 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="114c62cf-b040-491e-90fa-794b4cc29361" containerName="libvirt-edpm-deployment-openstack-edpm-ipam"
Sep 30 10:37:04 crc kubenswrapper[4730]: E0930 10:37:04.183791 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="93d1dcc4-d30e-43fa-a9ab-59bdd60aa1c5" containerName="extract-content"
Sep 30 10:37:04 crc kubenswrapper[4730]: I0930 10:37:04.183797 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="93d1dcc4-d30e-43fa-a9ab-59bdd60aa1c5" containerName="extract-content"
Sep 30 10:37:04 crc kubenswrapper[4730]: E0930 10:37:04.183809 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="93d1dcc4-d30e-43fa-a9ab-59bdd60aa1c5" containerName="extract-utilities"
Sep 30 10:37:04 crc kubenswrapper[4730]: I0930 10:37:04.183815 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="93d1dcc4-d30e-43fa-a9ab-59bdd60aa1c5" containerName="extract-utilities"
Sep 30 10:37:04 crc kubenswrapper[4730]: E0930 10:37:04.183832 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="93d1dcc4-d30e-43fa-a9ab-59bdd60aa1c5" containerName="registry-server"
Sep 30 10:37:04 crc kubenswrapper[4730]: I0930 10:37:04.183837 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="93d1dcc4-d30e-43fa-a9ab-59bdd60aa1c5" containerName="registry-server"
Sep 30 10:37:04 crc kubenswrapper[4730]: I0930 10:37:04.184029 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="114c62cf-b040-491e-90fa-794b4cc29361" containerName="libvirt-edpm-deployment-openstack-edpm-ipam"
Sep 30 10:37:04 crc kubenswrapper[4730]: I0930 10:37:04.184045 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="93d1dcc4-d30e-43fa-a9ab-59bdd60aa1c5" containerName="registry-server"
Sep 30 10:37:04 crc kubenswrapper[4730]: I0930 10:37:04.184656 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-mkqvk"
Sep 30 10:37:04 crc kubenswrapper[4730]: I0930 10:37:04.187894 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"nova-extra-config"
Sep 30 10:37:04 crc kubenswrapper[4730]: I0930 10:37:04.188175 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files"
Sep 30 10:37:04 crc kubenswrapper[4730]: I0930 10:37:04.188766 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Sep 30 10:37:04 crc kubenswrapper[4730]: I0930 10:37:04.189137 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ceph-nova"
Sep 30 10:37:04 crc kubenswrapper[4730]: I0930 10:37:04.189293 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Sep 30 10:37:04 crc kubenswrapper[4730]: I0930 10:37:04.189398 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Sep 30 10:37:04 crc kubenswrapper[4730]: I0930 10:37:04.189898 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-compute-config"
Sep 30 10:37:04 crc kubenswrapper[4730]: I0930 10:37:04.190980 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-8vdlt"
Sep 30 10:37:04 crc kubenswrapper[4730]: I0930 10:37:04.191847 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-migration-ssh-key"
Sep 30 10:37:04 crc kubenswrapper[4730]: I0930 10:37:04.193107 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-mkqvk"]
Sep 30 10:37:04 crc kubenswrapper[4730]: I0930 10:37:04.339709 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/80c10bdf-95bb-4372-ba25-b7bd5f563225-nova-cell1-compute-config-1\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-mkqvk\" (UID: \"80c10bdf-95bb-4372-ba25-b7bd5f563225\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-mkqvk"
Sep 30 10:37:04 crc kubenswrapper[4730]: I0930 10:37:04.340054 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/80c10bdf-95bb-4372-ba25-b7bd5f563225-ssh-key\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-mkqvk\" (UID: \"80c10bdf-95bb-4372-ba25-b7bd5f563225\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-mkqvk"
Sep 30 10:37:04 crc kubenswrapper[4730]: I0930 10:37:04.340206 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph-nova-0\" (UniqueName: \"kubernetes.io/configmap/80c10bdf-95bb-4372-ba25-b7bd5f563225-ceph-nova-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-mkqvk\" (UID: \"80c10bdf-95bb-4372-ba25-b7bd5f563225\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-mkqvk"
Sep 30 10:37:04 crc kubenswrapper[4730]: I0930 10:37:04.340345 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/80c10bdf-95bb-4372-ba25-b7bd5f563225-ceph\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-mkqvk\" (UID: \"80c10bdf-95bb-4372-ba25-b7bd5f563225\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-mkqvk"
Sep 30 10:37:04 crc kubenswrapper[4730]: I0930 10:37:04.340494 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/80c10bdf-95bb-4372-ba25-b7bd5f563225-nova-extra-config-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-mkqvk\" (UID: \"80c10bdf-95bb-4372-ba25-b7bd5f563225\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-mkqvk"
Sep 30 10:37:04 crc kubenswrapper[4730]: I0930 10:37:04.340659 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/80c10bdf-95bb-4372-ba25-b7bd5f563225-nova-migration-ssh-key-1\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-mkqvk\" (UID: \"80c10bdf-95bb-4372-ba25-b7bd5f563225\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-mkqvk"
Sep 30 10:37:04 crc kubenswrapper[4730]: I0930 10:37:04.340769 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/80c10bdf-95bb-4372-ba25-b7bd5f563225-inventory\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-mkqvk\" (UID: \"80c10bdf-95bb-4372-ba25-b7bd5f563225\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-mkqvk"
Sep 30 10:37:04 crc kubenswrapper[4730]: I0930 10:37:04.340878 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/80c10bdf-95bb-4372-ba25-b7bd5f563225-nova-migration-ssh-key-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-mkqvk\" (UID: \"80c10bdf-95bb-4372-ba25-b7bd5f563225\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-mkqvk"
Sep 30 10:37:04 crc kubenswrapper[4730]: I0930 10:37:04.341015 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/80c10bdf-95bb-4372-ba25-b7bd5f563225-nova-cell1-compute-config-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-mkqvk\" (UID: \"80c10bdf-95bb-4372-ba25-b7bd5f563225\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-mkqvk"
Sep 30 10:37:04 crc kubenswrapper[4730]: I0930 10:37:04.341110 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-custom-ceph-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/80c10bdf-95bb-4372-ba25-b7bd5f563225-nova-custom-ceph-combined-ca-bundle\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-mkqvk\" (UID: \"80c10bdf-95bb-4372-ba25-b7bd5f563225\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-mkqvk"
Sep 30 10:37:04 crc kubenswrapper[4730]: I0930 10:37:04.341218 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xgnvk\" (UniqueName: \"kubernetes.io/projected/80c10bdf-95bb-4372-ba25-b7bd5f563225-kube-api-access-xgnvk\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-mkqvk\" (UID: \"80c10bdf-95bb-4372-ba25-b7bd5f563225\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-mkqvk"
Sep 30 10:37:04 crc kubenswrapper[4730]: I0930 10:37:04.443154 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/80c10bdf-95bb-4372-ba25-b7bd5f563225-nova-extra-config-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-mkqvk\" (UID: \"80c10bdf-95bb-4372-ba25-b7bd5f563225\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-mkqvk"
Sep 30 10:37:04 crc kubenswrapper[4730]: I0930 10:37:04.443215 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/80c10bdf-95bb-4372-ba25-b7bd5f563225-nova-migration-ssh-key-1\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-mkqvk\" (UID: \"80c10bdf-95bb-4372-ba25-b7bd5f563225\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-mkqvk"
Sep 30 10:37:04 crc kubenswrapper[4730]: I0930 10:37:04.443241 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/80c10bdf-95bb-4372-ba25-b7bd5f563225-inventory\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-mkqvk\" (UID: \"80c10bdf-95bb-4372-ba25-b7bd5f563225\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-mkqvk"
Sep 30 10:37:04 crc kubenswrapper[4730]: I0930 10:37:04.443280 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/80c10bdf-95bb-4372-ba25-b7bd5f563225-nova-migration-ssh-key-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-mkqvk\" (UID: \"80c10bdf-95bb-4372-ba25-b7bd5f563225\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-mkqvk"
Sep 30 10:37:04 crc kubenswrapper[4730]: I0930 10:37:04.443672 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/80c10bdf-95bb-4372-ba25-b7bd5f563225-nova-cell1-compute-config-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-mkqvk\" (UID: \"80c10bdf-95bb-4372-ba25-b7bd5f563225\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-mkqvk"
Sep 30 10:37:04 crc kubenswrapper[4730]: I0930 10:37:04.444235 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/80c10bdf-95bb-4372-ba25-b7bd5f563225-nova-extra-config-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-mkqvk\" (UID: \"80c10bdf-95bb-4372-ba25-b7bd5f563225\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-mkqvk"
Sep 30 10:37:04 crc kubenswrapper[4730]: I0930 10:37:04.444277 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-custom-ceph-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/80c10bdf-95bb-4372-ba25-b7bd5f563225-nova-custom-ceph-combined-ca-bundle\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-mkqvk\" (UID: \"80c10bdf-95bb-4372-ba25-b7bd5f563225\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-mkqvk"
Sep 30 10:37:04 crc kubenswrapper[4730]: I0930 10:37:04.444408 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xgnvk\" (UniqueName: \"kubernetes.io/projected/80c10bdf-95bb-4372-ba25-b7bd5f563225-kube-api-access-xgnvk\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-mkqvk\" (UID: \"80c10bdf-95bb-4372-ba25-b7bd5f563225\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-mkqvk"
Sep 30 10:37:04 crc kubenswrapper[4730]: I0930 10:37:04.444470 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/80c10bdf-95bb-4372-ba25-b7bd5f563225-nova-cell1-compute-config-1\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-mkqvk\" (UID: \"80c10bdf-95bb-4372-ba25-b7bd5f563225\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-mkqvk"
Sep 30 10:37:04 crc kubenswrapper[4730]: I0930 10:37:04.444505 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/80c10bdf-95bb-4372-ba25-b7bd5f563225-ssh-key\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-mkqvk\" (UID: \"80c10bdf-95bb-4372-ba25-b7bd5f563225\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-mkqvk"
Sep 30 10:37:04 crc kubenswrapper[4730]: I0930 10:37:04.444545 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph-nova-0\" (UniqueName: \"kubernetes.io/configmap/80c10bdf-95bb-4372-ba25-b7bd5f563225-ceph-nova-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-mkqvk\" (UID: \"80c10bdf-95bb-4372-ba25-b7bd5f563225\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-mkqvk"
Sep 30 10:37:04 crc kubenswrapper[4730]: I0930 10:37:04.444580 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/80c10bdf-95bb-4372-ba25-b7bd5f563225-ceph\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-mkqvk\" (UID: \"80c10bdf-95bb-4372-ba25-b7bd5f563225\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-mkqvk"
Sep 30 10:37:04 crc kubenswrapper[4730]: I0930 10:37:04.445861 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph-nova-0\" (UniqueName: \"kubernetes.io/configmap/80c10bdf-95bb-4372-ba25-b7bd5f563225-ceph-nova-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-mkqvk\" (UID: \"80c10bdf-95bb-4372-ba25-b7bd5f563225\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-mkqvk"
Sep 30 10:37:04 crc kubenswrapper[4730]: I0930 10:37:04.447432 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/80c10bdf-95bb-4372-ba25-b7bd5f563225-nova-cell1-compute-config-1\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-mkqvk\" (UID: \"80c10bdf-95bb-4372-ba25-b7bd5f563225\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-mkqvk"
Sep 30 10:37:04 crc kubenswrapper[4730]: I0930 10:37:04.447579 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/80c10bdf-95bb-4372-ba25-b7bd5f563225-nova-cell1-compute-config-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-mkqvk\" (UID: \"80c10bdf-95bb-4372-ba25-b7bd5f563225\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-mkqvk"
Sep 30 10:37:04 crc kubenswrapper[4730]: I0930 10:37:04.447626 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/80c10bdf-95bb-4372-ba25-b7bd5f563225-inventory\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-mkqvk\" (UID: \"80c10bdf-95bb-4372-ba25-b7bd5f563225\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-mkqvk"
Sep 30 10:37:04 crc kubenswrapper[4730]: I0930 10:37:04.447675 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/80c10bdf-95bb-4372-ba25-b7bd5f563225-nova-migration-ssh-key-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-mkqvk\" (UID: \"80c10bdf-95bb-4372-ba25-b7bd5f563225\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-mkqvk"
Sep 30 10:37:04 crc kubenswrapper[4730]: I0930 10:37:04.448076 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-custom-ceph-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/80c10bdf-95bb-4372-ba25-b7bd5f563225-nova-custom-ceph-combined-ca-bundle\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-mkqvk\" (UID: \"80c10bdf-95bb-4372-ba25-b7bd5f563225\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-mkqvk"
Sep 30 10:37:04 crc kubenswrapper[4730]: I0930 10:37:04.448447 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/80c10bdf-95bb-4372-ba25-b7bd5f563225-nova-migration-ssh-key-1\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-mkqvk\" (UID: \"80c10bdf-95bb-4372-ba25-b7bd5f563225\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-mkqvk"
Sep 30 10:37:04 crc kubenswrapper[4730]: I0930 10:37:04.449223 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/80c10bdf-95bb-4372-ba25-b7bd5f563225-ceph\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-mkqvk\" (UID: \"80c10bdf-95bb-4372-ba25-b7bd5f563225\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-mkqvk"
Sep 30 10:37:04 crc kubenswrapper[4730]: I0930 10:37:04.450027 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/80c10bdf-95bb-4372-ba25-b7bd5f563225-ssh-key\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-mkqvk\" (UID: \"80c10bdf-95bb-4372-ba25-b7bd5f563225\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-mkqvk"
Sep 30 10:37:04 crc kubenswrapper[4730]: I0930 10:37:04.461942 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xgnvk\" (UniqueName: \"kubernetes.io/projected/80c10bdf-95bb-4372-ba25-b7bd5f563225-kube-api-access-xgnvk\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-mkqvk\" (UID: \"80c10bdf-95bb-4372-ba25-b7bd5f563225\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-mkqvk"
Sep 30 10:37:04 crc kubenswrapper[4730]: I0930 10:37:04.501770 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-mkqvk"
Sep 30 10:37:04 crc kubenswrapper[4730]: I0930 10:37:04.978895 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-mkqvk"]
Sep 30 10:37:05 crc kubenswrapper[4730]: I0930 10:37:05.043800 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-mkqvk" event={"ID":"80c10bdf-95bb-4372-ba25-b7bd5f563225","Type":"ContainerStarted","Data":"49def117d8618da517dca6e9f31eba9d49e15cb1fbcb530f23731c98dceebcb8"}
Sep 30 10:37:06 crc kubenswrapper[4730]: I0930 10:37:06.054748 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-mkqvk" event={"ID":"80c10bdf-95bb-4372-ba25-b7bd5f563225","Type":"ContainerStarted","Data":"1bd532bbba81a0a3b54ebf274ec3f6ea2b0757b07ab2058f8e6cf54ee9cdec3e"}
Sep 30 10:37:06 crc kubenswrapper[4730]: I0930 10:37:06.075925 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-mkqvk" podStartSLOduration=1.6603219390000001 podStartE2EDuration="2.075901333s" podCreationTimestamp="2025-09-30 10:37:04 +0000 UTC" firstStartedPulling="2025-09-30 10:37:04.984579467 +0000 UTC m=+2869.317839460" lastFinishedPulling="2025-09-30 10:37:05.400158851 +0000 UTC m=+2869.733418854" observedRunningTime="2025-09-30 10:37:06.07078593 +0000 UTC m=+2870.404045943" watchObservedRunningTime="2025-09-30 10:37:06.075901333 +0000 UTC m=+2870.409161366"
Sep 30 10:37:32 crc kubenswrapper[4730]: I0930 10:37:32.336938 4730 patch_prober.go:28] interesting pod/machine-config-daemon-d4zf9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 30 10:37:32 crc kubenswrapper[4730]: I0930 10:37:32.337504 4730 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 30 10:37:32 crc kubenswrapper[4730]: I0930 10:37:32.337550 4730 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9"
Sep 30 10:37:32 crc kubenswrapper[4730]: I0930 10:37:32.338291 4730 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"7e6c79d1416bb74cb6515c7fc45c6c769987357541374ab109fdf2b032a93fd0"} pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Sep 30 10:37:32 crc kubenswrapper[4730]: I0930 10:37:32.338334 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerName="machine-config-daemon" containerID="cri-o://7e6c79d1416bb74cb6515c7fc45c6c769987357541374ab109fdf2b032a93fd0" gracePeriod=600
Sep 30 10:37:32 crc kubenswrapper[4730]: E0930 10:37:32.459056 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327"
Sep 30 10:37:33 crc kubenswrapper[4730]: I0930 10:37:33.339046 4730 generic.go:334] "Generic (PLEG): container finished" podID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerID="7e6c79d1416bb74cb6515c7fc45c6c769987357541374ab109fdf2b032a93fd0" exitCode=0
Sep 30 10:37:33 crc kubenswrapper[4730]: I0930 10:37:33.339120 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" event={"ID":"95bd4436-8399-478d-9552-c9ba5ae8f327","Type":"ContainerDied","Data":"7e6c79d1416bb74cb6515c7fc45c6c769987357541374ab109fdf2b032a93fd0"}
Sep 30 10:37:33 crc kubenswrapper[4730]: I0930 10:37:33.339473 4730 scope.go:117] "RemoveContainer" containerID="fca465fdb97ac9d9a109d35ebeee9e761de6e92d75613c1d72ae65f001b89f7b"
Sep 30 10:37:33 crc kubenswrapper[4730]: I0930 10:37:33.340202 4730 scope.go:117] "RemoveContainer" containerID="7e6c79d1416bb74cb6515c7fc45c6c769987357541374ab109fdf2b032a93fd0"
Sep 30 10:37:33 crc kubenswrapper[4730]: E0930 10:37:33.340577 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327"
Sep 30 10:37:46 crc kubenswrapper[4730]: I0930 10:37:46.389118 4730 scope.go:117] "RemoveContainer" containerID="7e6c79d1416bb74cb6515c7fc45c6c769987357541374ab109fdf2b032a93fd0"
Sep 30 10:37:46 crc kubenswrapper[4730]: E0930 10:37:46.391066 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327"
Sep 30 10:38:01 crc kubenswrapper[4730]: I0930 10:38:01.381801 4730 scope.go:117] "RemoveContainer" containerID="7e6c79d1416bb74cb6515c7fc45c6c769987357541374ab109fdf2b032a93fd0"
Sep 30 10:38:01 crc kubenswrapper[4730]: E0930 10:38:01.382678 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327"
Sep 30 10:38:12 crc kubenswrapper[4730]: I0930 10:38:12.380734 4730 scope.go:117] "RemoveContainer" containerID="7e6c79d1416bb74cb6515c7fc45c6c769987357541374ab109fdf2b032a93fd0"
Sep 30 10:38:12 crc kubenswrapper[4730]: E0930 10:38:12.381602 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327"
Sep 30 10:38:23 crc kubenswrapper[4730]: I0930 10:38:23.380872 4730 scope.go:117] "RemoveContainer" containerID="7e6c79d1416bb74cb6515c7fc45c6c769987357541374ab109fdf2b032a93fd0"
Sep 30 10:38:23 crc kubenswrapper[4730]: E0930 10:38:23.381975 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327"
Sep 30 10:38:36 crc kubenswrapper[4730]: I0930 10:38:36.387167 4730 scope.go:117] "RemoveContainer" containerID="7e6c79d1416bb74cb6515c7fc45c6c769987357541374ab109fdf2b032a93fd0"
Sep 30 10:38:36 crc kubenswrapper[4730]: E0930 10:38:36.388131 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327"
Sep 30 10:38:47 crc kubenswrapper[4730]: I0930 10:38:47.381816 4730 scope.go:117] "RemoveContainer" containerID="7e6c79d1416bb74cb6515c7fc45c6c769987357541374ab109fdf2b032a93fd0"
Sep 30 10:38:47 crc kubenswrapper[4730]: E0930 10:38:47.382864 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327"
Sep 30 10:38:59 crc kubenswrapper[4730]: I0930 10:38:59.381413 4730 scope.go:117] "RemoveContainer" containerID="7e6c79d1416bb74cb6515c7fc45c6c769987357541374ab109fdf2b032a93fd0"
Sep 30 10:38:59 crc kubenswrapper[4730]: E0930 10:38:59.383387 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327"
Sep 30 10:39:12 crc kubenswrapper[4730]: I0930 10:39:12.381877 4730 scope.go:117] "RemoveContainer" containerID="7e6c79d1416bb74cb6515c7fc45c6c769987357541374ab109fdf2b032a93fd0"
Sep 30 10:39:12 crc kubenswrapper[4730]: E0930 10:39:12.384177 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327"
Sep 30 10:39:27 crc kubenswrapper[4730]: I0930 10:39:27.380903 4730 scope.go:117] "RemoveContainer" containerID="7e6c79d1416bb74cb6515c7fc45c6c769987357541374ab109fdf2b032a93fd0"
Sep 30 10:39:27 crc kubenswrapper[4730]: E0930 10:39:27.381823 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327"
Sep 30 10:39:40 crc kubenswrapper[4730]: I0930 10:39:40.381965 4730 scope.go:117] "RemoveContainer" containerID="7e6c79d1416bb74cb6515c7fc45c6c769987357541374ab109fdf2b032a93fd0"
Sep 30 10:39:40 crc kubenswrapper[4730]: E0930 10:39:40.382875 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327"
Sep 30 10:39:54 crc kubenswrapper[4730]: I0930 10:39:54.384810 4730 scope.go:117] "RemoveContainer" containerID="7e6c79d1416bb74cb6515c7fc45c6c769987357541374ab109fdf2b032a93fd0"
Sep 30 10:39:54 crc kubenswrapper[4730]: E0930 10:39:54.404175 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327"
Sep 30 10:40:09 crc kubenswrapper[4730]: I0930 10:40:09.380979 4730 scope.go:117] "RemoveContainer" containerID="7e6c79d1416bb74cb6515c7fc45c6c769987357541374ab109fdf2b032a93fd0"
Sep 30 10:40:09 crc kubenswrapper[4730]: E0930 10:40:09.381751 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327"
Sep 30 10:40:20 crc kubenswrapper[4730]: I0930 10:40:20.380923 4730 scope.go:117] "RemoveContainer" containerID="7e6c79d1416bb74cb6515c7fc45c6c769987357541374ab109fdf2b032a93fd0"
Sep 30 10:40:20 crc kubenswrapper[4730]: E0930 10:40:20.381821 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327"
Sep 30 10:40:32 crc kubenswrapper[4730]: I0930 10:40:32.918074 4730 generic.go:334] "Generic (PLEG): container finished" podID="80c10bdf-95bb-4372-ba25-b7bd5f563225" containerID="1bd532bbba81a0a3b54ebf274ec3f6ea2b0757b07ab2058f8e6cf54ee9cdec3e" exitCode=0
Sep 30 10:40:32 crc kubenswrapper[4730]: I0930 10:40:32.918304 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-mkqvk" event={"ID":"80c10bdf-95bb-4372-ba25-b7bd5f563225","Type":"ContainerDied","Data":"1bd532bbba81a0a3b54ebf274ec3f6ea2b0757b07ab2058f8e6cf54ee9cdec3e"}
Sep 30 10:40:34 crc kubenswrapper[4730]: I0930 10:40:34.422076 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-mkqvk"
Sep 30 10:40:34 crc kubenswrapper[4730]: I0930 10:40:34.514248 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/80c10bdf-95bb-4372-ba25-b7bd5f563225-nova-extra-config-0\") pod \"80c10bdf-95bb-4372-ba25-b7bd5f563225\" (UID: \"80c10bdf-95bb-4372-ba25-b7bd5f563225\") "
Sep 30 10:40:34 crc kubenswrapper[4730]: I0930 10:40:34.514682 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/80c10bdf-95bb-4372-ba25-b7bd5f563225-ssh-key\") pod \"80c10bdf-95bb-4372-ba25-b7bd5f563225\" (UID: \"80c10bdf-95bb-4372-ba25-b7bd5f563225\") "
Sep 30 10:40:34 crc kubenswrapper[4730]: I0930 10:40:34.514707 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/80c10bdf-95bb-4372-ba25-b7bd5f563225-nova-migration-ssh-key-0\") pod \"80c10bdf-95bb-4372-ba25-b7bd5f563225\" (UID: \"80c10bdf-95bb-4372-ba25-b7bd5f563225\") "
Sep 30 10:40:34 crc kubenswrapper[4730]: I0930 10:40:34.514731 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph-nova-0\" (UniqueName: \"kubernetes.io/configmap/80c10bdf-95bb-4372-ba25-b7bd5f563225-ceph-nova-0\") pod \"80c10bdf-95bb-4372-ba25-b7bd5f563225\" (UID: \"80c10bdf-95bb-4372-ba25-b7bd5f563225\") "
Sep 30 10:40:34 crc kubenswrapper[4730]: I0930 10:40:34.514749 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/80c10bdf-95bb-4372-ba25-b7bd5f563225-ceph\") pod \"80c10bdf-95bb-4372-ba25-b7bd5f563225\" (UID: \"80c10bdf-95bb-4372-ba25-b7bd5f563225\") "
Sep 30 10:40:34 crc kubenswrapper[4730]: I0930 10:40:34.514803 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/80c10bdf-95bb-4372-ba25-b7bd5f563225-nova-cell1-compute-config-0\") pod \"80c10bdf-95bb-4372-ba25-b7bd5f563225\" (UID: \"80c10bdf-95bb-4372-ba25-b7bd5f563225\") "
Sep 30 10:40:34 crc kubenswrapper[4730]: I0930 10:40:34.514900 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/80c10bdf-95bb-4372-ba25-b7bd5f563225-nova-cell1-compute-config-1\") pod \"80c10bdf-95bb-4372-ba25-b7bd5f563225\" (UID: \"80c10bdf-95bb-4372-ba25-b7bd5f563225\") "
Sep 30 10:40:34 crc kubenswrapper[4730]: I0930 10:40:34.514972 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xgnvk\" (UniqueName: \"kubernetes.io/projected/80c10bdf-95bb-4372-ba25-b7bd5f563225-kube-api-access-xgnvk\") pod \"80c10bdf-95bb-4372-ba25-b7bd5f563225\" (UID: \"80c10bdf-95bb-4372-ba25-b7bd5f563225\") "
Sep 30 10:40:34 crc kubenswrapper[4730]: I0930 10:40:34.515007 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-custom-ceph-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/80c10bdf-95bb-4372-ba25-b7bd5f563225-nova-custom-ceph-combined-ca-bundle\") pod \"80c10bdf-95bb-4372-ba25-b7bd5f563225\" (UID: \"80c10bdf-95bb-4372-ba25-b7bd5f563225\") "
Sep 30 10:40:34 crc kubenswrapper[4730]: I0930 10:40:34.515028 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/80c10bdf-95bb-4372-ba25-b7bd5f563225-nova-migration-ssh-key-1\") pod \"80c10bdf-95bb-4372-ba25-b7bd5f563225\" (UID: \"80c10bdf-95bb-4372-ba25-b7bd5f563225\") "
Sep 30 10:40:34 crc kubenswrapper[4730]: I0930 10:40:34.515546 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/80c10bdf-95bb-4372-ba25-b7bd5f563225-inventory\") pod \"80c10bdf-95bb-4372-ba25-b7bd5f563225\" (UID: \"80c10bdf-95bb-4372-ba25-b7bd5f563225\") "
Sep 30 10:40:34 crc kubenswrapper[4730]: I0930 10:40:34.520695 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/80c10bdf-95bb-4372-ba25-b7bd5f563225-ceph" (OuterVolumeSpecName: "ceph") pod "80c10bdf-95bb-4372-ba25-b7bd5f563225" (UID: "80c10bdf-95bb-4372-ba25-b7bd5f563225"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 10:40:34 crc kubenswrapper[4730]: I0930 10:40:34.520980 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/80c10bdf-95bb-4372-ba25-b7bd5f563225-kube-api-access-xgnvk" (OuterVolumeSpecName: "kube-api-access-xgnvk") pod "80c10bdf-95bb-4372-ba25-b7bd5f563225" (UID: "80c10bdf-95bb-4372-ba25-b7bd5f563225"). InnerVolumeSpecName "kube-api-access-xgnvk". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 10:40:34 crc kubenswrapper[4730]: I0930 10:40:34.527624 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/80c10bdf-95bb-4372-ba25-b7bd5f563225-nova-custom-ceph-combined-ca-bundle" (OuterVolumeSpecName: "nova-custom-ceph-combined-ca-bundle") pod "80c10bdf-95bb-4372-ba25-b7bd5f563225" (UID: "80c10bdf-95bb-4372-ba25-b7bd5f563225"). InnerVolumeSpecName "nova-custom-ceph-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 10:40:34 crc kubenswrapper[4730]: I0930 10:40:34.543368 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/80c10bdf-95bb-4372-ba25-b7bd5f563225-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "80c10bdf-95bb-4372-ba25-b7bd5f563225" (UID: "80c10bdf-95bb-4372-ba25-b7bd5f563225"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 10:40:34 crc kubenswrapper[4730]: I0930 10:40:34.544467 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/80c10bdf-95bb-4372-ba25-b7bd5f563225-nova-cell1-compute-config-0" (OuterVolumeSpecName: "nova-cell1-compute-config-0") pod "80c10bdf-95bb-4372-ba25-b7bd5f563225" (UID: "80c10bdf-95bb-4372-ba25-b7bd5f563225"). InnerVolumeSpecName "nova-cell1-compute-config-0". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 10:40:34 crc kubenswrapper[4730]: I0930 10:40:34.552059 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/80c10bdf-95bb-4372-ba25-b7bd5f563225-nova-extra-config-0" (OuterVolumeSpecName: "nova-extra-config-0") pod "80c10bdf-95bb-4372-ba25-b7bd5f563225" (UID: "80c10bdf-95bb-4372-ba25-b7bd5f563225"). InnerVolumeSpecName "nova-extra-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 10:40:34 crc kubenswrapper[4730]: I0930 10:40:34.554031 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/80c10bdf-95bb-4372-ba25-b7bd5f563225-ceph-nova-0" (OuterVolumeSpecName: "ceph-nova-0") pod "80c10bdf-95bb-4372-ba25-b7bd5f563225" (UID: "80c10bdf-95bb-4372-ba25-b7bd5f563225"). InnerVolumeSpecName "ceph-nova-0". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 10:40:34 crc kubenswrapper[4730]: I0930 10:40:34.556945 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/80c10bdf-95bb-4372-ba25-b7bd5f563225-inventory" (OuterVolumeSpecName: "inventory") pod "80c10bdf-95bb-4372-ba25-b7bd5f563225" (UID: "80c10bdf-95bb-4372-ba25-b7bd5f563225"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 10:40:34 crc kubenswrapper[4730]: I0930 10:40:34.557577 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/80c10bdf-95bb-4372-ba25-b7bd5f563225-nova-cell1-compute-config-1" (OuterVolumeSpecName: "nova-cell1-compute-config-1") pod "80c10bdf-95bb-4372-ba25-b7bd5f563225" (UID: "80c10bdf-95bb-4372-ba25-b7bd5f563225"). InnerVolumeSpecName "nova-cell1-compute-config-1". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 10:40:34 crc kubenswrapper[4730]: I0930 10:40:34.557978 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/80c10bdf-95bb-4372-ba25-b7bd5f563225-nova-migration-ssh-key-1" (OuterVolumeSpecName: "nova-migration-ssh-key-1") pod "80c10bdf-95bb-4372-ba25-b7bd5f563225" (UID: "80c10bdf-95bb-4372-ba25-b7bd5f563225"). InnerVolumeSpecName "nova-migration-ssh-key-1". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 10:40:34 crc kubenswrapper[4730]: I0930 10:40:34.576788 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/80c10bdf-95bb-4372-ba25-b7bd5f563225-nova-migration-ssh-key-0" (OuterVolumeSpecName: "nova-migration-ssh-key-0") pod "80c10bdf-95bb-4372-ba25-b7bd5f563225" (UID: "80c10bdf-95bb-4372-ba25-b7bd5f563225"). InnerVolumeSpecName "nova-migration-ssh-key-0". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 10:40:34 crc kubenswrapper[4730]: I0930 10:40:34.617955 4730 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/80c10bdf-95bb-4372-ba25-b7bd5f563225-ssh-key\") on node \"crc\" DevicePath \"\""
Sep 30 10:40:34 crc kubenswrapper[4730]: I0930 10:40:34.617998 4730 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/80c10bdf-95bb-4372-ba25-b7bd5f563225-nova-migration-ssh-key-0\") on node \"crc\" DevicePath \"\""
Sep 30 10:40:34 crc kubenswrapper[4730]: I0930 10:40:34.618012 4730 reconciler_common.go:293] "Volume detached for volume \"ceph-nova-0\" (UniqueName: \"kubernetes.io/configmap/80c10bdf-95bb-4372-ba25-b7bd5f563225-ceph-nova-0\") on node \"crc\" DevicePath \"\""
Sep 30 10:40:34 crc kubenswrapper[4730]: I0930 10:40:34.618024 4730 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/80c10bdf-95bb-4372-ba25-b7bd5f563225-ceph\") on node \"crc\" DevicePath \"\""
Sep 30 10:40:34 crc kubenswrapper[4730]: I0930 10:40:34.618035 4730 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/80c10bdf-95bb-4372-ba25-b7bd5f563225-nova-cell1-compute-config-0\") on node \"crc\" DevicePath \"\""
Sep 30 10:40:34 crc kubenswrapper[4730]: I0930 10:40:34.618047 4730 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/80c10bdf-95bb-4372-ba25-b7bd5f563225-nova-cell1-compute-config-1\") on node \"crc\" DevicePath \"\""
Sep 30 10:40:34 crc kubenswrapper[4730]: I0930 10:40:34.618058 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xgnvk\" (UniqueName: \"kubernetes.io/projected/80c10bdf-95bb-4372-ba25-b7bd5f563225-kube-api-access-xgnvk\") on node \"crc\" DevicePath \"\""
Sep 30 10:40:34 crc kubenswrapper[4730]: I0930 10:40:34.618069 4730 reconciler_common.go:293] "Volume detached for volume \"nova-custom-ceph-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/80c10bdf-95bb-4372-ba25-b7bd5f563225-nova-custom-ceph-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 30 10:40:34 crc kubenswrapper[4730]: I0930 10:40:34.618081 4730 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/80c10bdf-95bb-4372-ba25-b7bd5f563225-nova-migration-ssh-key-1\") on node \"crc\" DevicePath \"\""
Sep 30 10:40:34 crc kubenswrapper[4730]: I0930 10:40:34.618093 4730 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/80c10bdf-95bb-4372-ba25-b7bd5f563225-inventory\") on node \"crc\" DevicePath \"\""
Sep 30 10:40:34 crc kubenswrapper[4730]: I0930 10:40:34.618105 4730 reconciler_common.go:293] "Volume detached for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/80c10bdf-95bb-4372-ba25-b7bd5f563225-nova-extra-config-0\") on node \"crc\" DevicePath \"\""
Sep 30 10:40:34 crc kubenswrapper[4730]: I0930 10:40:34.939069 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-mkqvk" event={"ID":"80c10bdf-95bb-4372-ba25-b7bd5f563225","Type":"ContainerDied","Data":"49def117d8618da517dca6e9f31eba9d49e15cb1fbcb530f23731c98dceebcb8"}
Sep 30 10:40:34 crc kubenswrapper[4730]: I0930 10:40:34.939119 4730 pod_container_deletor.go:80] "Container not found in pod's
containers" containerID="49def117d8618da517dca6e9f31eba9d49e15cb1fbcb530f23731c98dceebcb8" Sep 30 10:40:34 crc kubenswrapper[4730]: I0930 10:40:34.939146 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-mkqvk" Sep 30 10:40:35 crc kubenswrapper[4730]: I0930 10:40:35.091642 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-plcgw"] Sep 30 10:40:35 crc kubenswrapper[4730]: E0930 10:40:35.092424 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="80c10bdf-95bb-4372-ba25-b7bd5f563225" containerName="nova-custom-ceph-edpm-deployment-openstack-edpm-ipam" Sep 30 10:40:35 crc kubenswrapper[4730]: I0930 10:40:35.092569 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="80c10bdf-95bb-4372-ba25-b7bd5f563225" containerName="nova-custom-ceph-edpm-deployment-openstack-edpm-ipam" Sep 30 10:40:35 crc kubenswrapper[4730]: I0930 10:40:35.092994 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="80c10bdf-95bb-4372-ba25-b7bd5f563225" containerName="nova-custom-ceph-edpm-deployment-openstack-edpm-ipam" Sep 30 10:40:35 crc kubenswrapper[4730]: I0930 10:40:35.093905 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-plcgw" Sep 30 10:40:35 crc kubenswrapper[4730]: I0930 10:40:35.096390 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 30 10:40:35 crc kubenswrapper[4730]: I0930 10:40:35.096592 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 10:40:35 crc kubenswrapper[4730]: I0930 10:40:35.096740 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-compute-config-data" Sep 30 10:40:35 crc kubenswrapper[4730]: I0930 10:40:35.096892 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 30 10:40:35 crc kubenswrapper[4730]: I0930 10:40:35.097532 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Sep 30 10:40:35 crc kubenswrapper[4730]: I0930 10:40:35.097870 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-8vdlt" Sep 30 10:40:35 crc kubenswrapper[4730]: I0930 10:40:35.104438 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-plcgw"] Sep 30 10:40:35 crc kubenswrapper[4730]: I0930 10:40:35.233808 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8m28m\" (UniqueName: \"kubernetes.io/projected/9a9e7f46-a278-48e1-9171-826bbba2fe2b-kube-api-access-8m28m\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-plcgw\" (UID: \"9a9e7f46-a278-48e1-9171-826bbba2fe2b\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-plcgw" Sep 30 10:40:35 crc kubenswrapper[4730]: I0930 10:40:35.233894 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/9a9e7f46-a278-48e1-9171-826bbba2fe2b-ceph\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-plcgw\" (UID: \"9a9e7f46-a278-48e1-9171-826bbba2fe2b\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-plcgw" 
Sep 30 10:40:35 crc kubenswrapper[4730]: I0930 10:40:35.233921 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9a9e7f46-a278-48e1-9171-826bbba2fe2b-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-plcgw\" (UID: \"9a9e7f46-a278-48e1-9171-826bbba2fe2b\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-plcgw" Sep 30 10:40:35 crc kubenswrapper[4730]: I0930 10:40:35.234006 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9a9e7f46-a278-48e1-9171-826bbba2fe2b-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-plcgw\" (UID: \"9a9e7f46-a278-48e1-9171-826bbba2fe2b\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-plcgw" Sep 30 10:40:35 crc kubenswrapper[4730]: I0930 10:40:35.234036 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/9a9e7f46-a278-48e1-9171-826bbba2fe2b-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-plcgw\" (UID: \"9a9e7f46-a278-48e1-9171-826bbba2fe2b\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-plcgw" Sep 30 10:40:35 crc kubenswrapper[4730]: I0930 10:40:35.234143 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9a9e7f46-a278-48e1-9171-826bbba2fe2b-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-plcgw\" (UID: \"9a9e7f46-a278-48e1-9171-826bbba2fe2b\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-plcgw" Sep 30 10:40:35 crc kubenswrapper[4730]: I0930 10:40:35.234162 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/9a9e7f46-a278-48e1-9171-826bbba2fe2b-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-plcgw\" (UID: \"9a9e7f46-a278-48e1-9171-826bbba2fe2b\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-plcgw" Sep 30 10:40:35 crc kubenswrapper[4730]: I0930 10:40:35.234186 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/9a9e7f46-a278-48e1-9171-826bbba2fe2b-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-plcgw\" (UID: \"9a9e7f46-a278-48e1-9171-826bbba2fe2b\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-plcgw" Sep 30 10:40:35 crc kubenswrapper[4730]: I0930 10:40:35.336290 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9a9e7f46-a278-48e1-9171-826bbba2fe2b-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-plcgw\" (UID: \"9a9e7f46-a278-48e1-9171-826bbba2fe2b\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-plcgw" Sep 30 10:40:35 crc kubenswrapper[4730]: I0930 10:40:35.336346 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: 
\"kubernetes.io/secret/9a9e7f46-a278-48e1-9171-826bbba2fe2b-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-plcgw\" (UID: \"9a9e7f46-a278-48e1-9171-826bbba2fe2b\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-plcgw" Sep 30 10:40:35 crc kubenswrapper[4730]: I0930 10:40:35.336383 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/9a9e7f46-a278-48e1-9171-826bbba2fe2b-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-plcgw\" (UID: \"9a9e7f46-a278-48e1-9171-826bbba2fe2b\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-plcgw" Sep 30 10:40:35 crc kubenswrapper[4730]: I0930 10:40:35.336455 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8m28m\" (UniqueName: \"kubernetes.io/projected/9a9e7f46-a278-48e1-9171-826bbba2fe2b-kube-api-access-8m28m\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-plcgw\" (UID: \"9a9e7f46-a278-48e1-9171-826bbba2fe2b\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-plcgw" Sep 30 10:40:35 crc kubenswrapper[4730]: I0930 10:40:35.336487 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/9a9e7f46-a278-48e1-9171-826bbba2fe2b-ceph\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-plcgw\" (UID: \"9a9e7f46-a278-48e1-9171-826bbba2fe2b\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-plcgw" Sep 30 10:40:35 crc kubenswrapper[4730]: I0930 10:40:35.336520 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9a9e7f46-a278-48e1-9171-826bbba2fe2b-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-plcgw\" (UID: \"9a9e7f46-a278-48e1-9171-826bbba2fe2b\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-plcgw" Sep 30 10:40:35 crc kubenswrapper[4730]: I0930 10:40:35.336563 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9a9e7f46-a278-48e1-9171-826bbba2fe2b-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-plcgw\" (UID: \"9a9e7f46-a278-48e1-9171-826bbba2fe2b\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-plcgw" Sep 30 10:40:35 crc kubenswrapper[4730]: I0930 10:40:35.336587 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/9a9e7f46-a278-48e1-9171-826bbba2fe2b-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-plcgw\" (UID: \"9a9e7f46-a278-48e1-9171-826bbba2fe2b\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-plcgw" Sep 30 10:40:35 crc kubenswrapper[4730]: I0930 10:40:35.340010 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/9a9e7f46-a278-48e1-9171-826bbba2fe2b-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-plcgw\" (UID: \"9a9e7f46-a278-48e1-9171-826bbba2fe2b\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-plcgw" Sep 30 10:40:35 crc kubenswrapper[4730]: I0930 10:40:35.341790 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-0\" 
(UniqueName: \"kubernetes.io/secret/9a9e7f46-a278-48e1-9171-826bbba2fe2b-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-plcgw\" (UID: \"9a9e7f46-a278-48e1-9171-826bbba2fe2b\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-plcgw" Sep 30 10:40:35 crc kubenswrapper[4730]: I0930 10:40:35.342170 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9a9e7f46-a278-48e1-9171-826bbba2fe2b-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-plcgw\" (UID: \"9a9e7f46-a278-48e1-9171-826bbba2fe2b\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-plcgw" Sep 30 10:40:35 crc kubenswrapper[4730]: I0930 10:40:35.342212 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9a9e7f46-a278-48e1-9171-826bbba2fe2b-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-plcgw\" (UID: \"9a9e7f46-a278-48e1-9171-826bbba2fe2b\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-plcgw" Sep 30 10:40:35 crc kubenswrapper[4730]: I0930 10:40:35.342760 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/9a9e7f46-a278-48e1-9171-826bbba2fe2b-ceph\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-plcgw\" (UID: \"9a9e7f46-a278-48e1-9171-826bbba2fe2b\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-plcgw" Sep 30 10:40:35 crc kubenswrapper[4730]: I0930 10:40:35.347304 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9a9e7f46-a278-48e1-9171-826bbba2fe2b-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-plcgw\" (UID: \"9a9e7f46-a278-48e1-9171-826bbba2fe2b\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-plcgw" Sep 30 10:40:35 crc kubenswrapper[4730]: I0930 10:40:35.347886 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/9a9e7f46-a278-48e1-9171-826bbba2fe2b-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-plcgw\" (UID: \"9a9e7f46-a278-48e1-9171-826bbba2fe2b\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-plcgw" Sep 30 10:40:35 crc kubenswrapper[4730]: I0930 10:40:35.358478 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8m28m\" (UniqueName: \"kubernetes.io/projected/9a9e7f46-a278-48e1-9171-826bbba2fe2b-kube-api-access-8m28m\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-plcgw\" (UID: \"9a9e7f46-a278-48e1-9171-826bbba2fe2b\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-plcgw" Sep 30 10:40:35 crc kubenswrapper[4730]: I0930 10:40:35.381405 4730 scope.go:117] "RemoveContainer" containerID="7e6c79d1416bb74cb6515c7fc45c6c769987357541374ab109fdf2b032a93fd0" Sep 30 10:40:35 crc kubenswrapper[4730]: E0930 10:40:35.381859 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 
10:40:35 crc kubenswrapper[4730]: I0930 10:40:35.410341 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-plcgw" Sep 30 10:40:35 crc kubenswrapper[4730]: I0930 10:40:35.935682 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-plcgw"] Sep 30 10:40:35 crc kubenswrapper[4730]: I0930 10:40:35.944854 4730 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 30 10:40:36 crc kubenswrapper[4730]: I0930 10:40:36.959396 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-plcgw" event={"ID":"9a9e7f46-a278-48e1-9171-826bbba2fe2b","Type":"ContainerStarted","Data":"ed81eb7f5010d7830940fee737031455a06239699bd05e8ef77803965a9870cb"} Sep 30 10:40:36 crc kubenswrapper[4730]: I0930 10:40:36.959921 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-plcgw" event={"ID":"9a9e7f46-a278-48e1-9171-826bbba2fe2b","Type":"ContainerStarted","Data":"4e0d4edaafbb9f4cc69965d2b8262090d9907fd4205f5bce00baa435f6e71048"} Sep 30 10:40:36 crc kubenswrapper[4730]: I0930 10:40:36.981051 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-plcgw" podStartSLOduration=1.521209281 podStartE2EDuration="1.98103169s" podCreationTimestamp="2025-09-30 10:40:35 +0000 UTC" firstStartedPulling="2025-09-30 10:40:35.944600806 +0000 UTC m=+3080.277860799" lastFinishedPulling="2025-09-30 10:40:36.404423215 +0000 UTC m=+3080.737683208" observedRunningTime="2025-09-30 10:40:36.976075281 +0000 UTC m=+3081.309335264" watchObservedRunningTime="2025-09-30 10:40:36.98103169 +0000 UTC m=+3081.314291683" Sep 30 10:40:46 crc kubenswrapper[4730]: I0930 10:40:46.388262 4730 scope.go:117] "RemoveContainer" containerID="7e6c79d1416bb74cb6515c7fc45c6c769987357541374ab109fdf2b032a93fd0" Sep 30 10:40:46 crc kubenswrapper[4730]: E0930 10:40:46.389090 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 10:41:00 crc kubenswrapper[4730]: I0930 10:41:00.381580 4730 scope.go:117] "RemoveContainer" containerID="7e6c79d1416bb74cb6515c7fc45c6c769987357541374ab109fdf2b032a93fd0" Sep 30 10:41:00 crc kubenswrapper[4730]: E0930 10:41:00.382552 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 10:41:08 crc kubenswrapper[4730]: I0930 10:41:08.125232 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-zxfgw"] Sep 30 10:41:08 crc kubenswrapper[4730]: I0930 10:41:08.127948 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-zxfgw" Sep 30 10:41:08 crc kubenswrapper[4730]: I0930 10:41:08.142888 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-zxfgw"] Sep 30 10:41:08 crc kubenswrapper[4730]: I0930 10:41:08.166751 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/04ec1190-8a48-4c88-8034-6e43d5d27a59-catalog-content\") pod \"redhat-operators-zxfgw\" (UID: \"04ec1190-8a48-4c88-8034-6e43d5d27a59\") " pod="openshift-marketplace/redhat-operators-zxfgw" Sep 30 10:41:08 crc kubenswrapper[4730]: I0930 10:41:08.166931 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qc75d\" (UniqueName: \"kubernetes.io/projected/04ec1190-8a48-4c88-8034-6e43d5d27a59-kube-api-access-qc75d\") pod \"redhat-operators-zxfgw\" (UID: \"04ec1190-8a48-4c88-8034-6e43d5d27a59\") " pod="openshift-marketplace/redhat-operators-zxfgw" Sep 30 10:41:08 crc kubenswrapper[4730]: I0930 10:41:08.166976 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/04ec1190-8a48-4c88-8034-6e43d5d27a59-utilities\") pod \"redhat-operators-zxfgw\" (UID: \"04ec1190-8a48-4c88-8034-6e43d5d27a59\") " pod="openshift-marketplace/redhat-operators-zxfgw" Sep 30 10:41:08 crc kubenswrapper[4730]: I0930 10:41:08.269443 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qc75d\" (UniqueName: \"kubernetes.io/projected/04ec1190-8a48-4c88-8034-6e43d5d27a59-kube-api-access-qc75d\") pod \"redhat-operators-zxfgw\" (UID: \"04ec1190-8a48-4c88-8034-6e43d5d27a59\") " pod="openshift-marketplace/redhat-operators-zxfgw" Sep 30 10:41:08 crc kubenswrapper[4730]: I0930 10:41:08.269509 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/04ec1190-8a48-4c88-8034-6e43d5d27a59-utilities\") pod \"redhat-operators-zxfgw\" (UID: \"04ec1190-8a48-4c88-8034-6e43d5d27a59\") " pod="openshift-marketplace/redhat-operators-zxfgw" Sep 30 10:41:08 crc kubenswrapper[4730]: I0930 10:41:08.269596 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/04ec1190-8a48-4c88-8034-6e43d5d27a59-catalog-content\") pod \"redhat-operators-zxfgw\" (UID: \"04ec1190-8a48-4c88-8034-6e43d5d27a59\") " pod="openshift-marketplace/redhat-operators-zxfgw" Sep 30 10:41:08 crc kubenswrapper[4730]: I0930 10:41:08.270225 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/04ec1190-8a48-4c88-8034-6e43d5d27a59-catalog-content\") pod \"redhat-operators-zxfgw\" (UID: \"04ec1190-8a48-4c88-8034-6e43d5d27a59\") " pod="openshift-marketplace/redhat-operators-zxfgw" Sep 30 10:41:08 crc kubenswrapper[4730]: I0930 10:41:08.270352 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/04ec1190-8a48-4c88-8034-6e43d5d27a59-utilities\") pod \"redhat-operators-zxfgw\" (UID: \"04ec1190-8a48-4c88-8034-6e43d5d27a59\") " pod="openshift-marketplace/redhat-operators-zxfgw" Sep 30 10:41:08 crc kubenswrapper[4730]: I0930 10:41:08.294019 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-qc75d\" (UniqueName: \"kubernetes.io/projected/04ec1190-8a48-4c88-8034-6e43d5d27a59-kube-api-access-qc75d\") pod \"redhat-operators-zxfgw\" (UID: \"04ec1190-8a48-4c88-8034-6e43d5d27a59\") " pod="openshift-marketplace/redhat-operators-zxfgw" Sep 30 10:41:08 crc kubenswrapper[4730]: I0930 10:41:08.457114 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-zxfgw" Sep 30 10:41:09 crc kubenswrapper[4730]: I0930 10:41:09.042960 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-zxfgw"] Sep 30 10:41:09 crc kubenswrapper[4730]: I0930 10:41:09.245727 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zxfgw" event={"ID":"04ec1190-8a48-4c88-8034-6e43d5d27a59","Type":"ContainerStarted","Data":"f16bbb450999fe727eeb5a6c75c1616f8fa69253e77553fa487a4661ed6cf1aa"} Sep 30 10:41:09 crc kubenswrapper[4730]: I0930 10:41:09.245776 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zxfgw" event={"ID":"04ec1190-8a48-4c88-8034-6e43d5d27a59","Type":"ContainerStarted","Data":"07f96ae478510b3b35fddcd1ebd4c75d38dd358f63f619678ce032ea95bb3a82"} Sep 30 10:41:10 crc kubenswrapper[4730]: I0930 10:41:10.259154 4730 generic.go:334] "Generic (PLEG): container finished" podID="04ec1190-8a48-4c88-8034-6e43d5d27a59" containerID="f16bbb450999fe727eeb5a6c75c1616f8fa69253e77553fa487a4661ed6cf1aa" exitCode=0 Sep 30 10:41:10 crc kubenswrapper[4730]: I0930 10:41:10.259207 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zxfgw" event={"ID":"04ec1190-8a48-4c88-8034-6e43d5d27a59","Type":"ContainerDied","Data":"f16bbb450999fe727eeb5a6c75c1616f8fa69253e77553fa487a4661ed6cf1aa"} Sep 30 10:41:12 crc kubenswrapper[4730]: I0930 10:41:12.380587 4730 scope.go:117] "RemoveContainer" containerID="7e6c79d1416bb74cb6515c7fc45c6c769987357541374ab109fdf2b032a93fd0" Sep 30 10:41:12 crc kubenswrapper[4730]: E0930 10:41:12.381203 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 10:41:21 crc kubenswrapper[4730]: I0930 10:41:21.352958 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zxfgw" event={"ID":"04ec1190-8a48-4c88-8034-6e43d5d27a59","Type":"ContainerStarted","Data":"e9cbede748b53b12955d59a5d882b4ba3a3f4eb56351a48a84a3159cdd22aab5"} Sep 30 10:41:22 crc kubenswrapper[4730]: I0930 10:41:22.365983 4730 generic.go:334] "Generic (PLEG): container finished" podID="04ec1190-8a48-4c88-8034-6e43d5d27a59" containerID="e9cbede748b53b12955d59a5d882b4ba3a3f4eb56351a48a84a3159cdd22aab5" exitCode=0 Sep 30 10:41:22 crc kubenswrapper[4730]: I0930 10:41:22.366036 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zxfgw" event={"ID":"04ec1190-8a48-4c88-8034-6e43d5d27a59","Type":"ContainerDied","Data":"e9cbede748b53b12955d59a5d882b4ba3a3f4eb56351a48a84a3159cdd22aab5"} Sep 30 10:41:23 crc kubenswrapper[4730]: I0930 10:41:23.381213 4730 scope.go:117] "RemoveContainer" 
containerID="7e6c79d1416bb74cb6515c7fc45c6c769987357541374ab109fdf2b032a93fd0" Sep 30 10:41:23 crc kubenswrapper[4730]: E0930 10:41:23.381955 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 10:41:24 crc kubenswrapper[4730]: I0930 10:41:24.395773 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zxfgw" event={"ID":"04ec1190-8a48-4c88-8034-6e43d5d27a59","Type":"ContainerStarted","Data":"018d1ecb18bee1bad42690dc70cdd5c7bdc0a89fc2afadd6c51180d704bd52a1"} Sep 30 10:41:24 crc kubenswrapper[4730]: I0930 10:41:24.411016 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-zxfgw" podStartSLOduration=2.804999085 podStartE2EDuration="16.410993396s" podCreationTimestamp="2025-09-30 10:41:08 +0000 UTC" firstStartedPulling="2025-09-30 10:41:10.261099198 +0000 UTC m=+3114.594359191" lastFinishedPulling="2025-09-30 10:41:23.867093509 +0000 UTC m=+3128.200353502" observedRunningTime="2025-09-30 10:41:24.410896594 +0000 UTC m=+3128.744156597" watchObservedRunningTime="2025-09-30 10:41:24.410993396 +0000 UTC m=+3128.744253389" Sep 30 10:41:28 crc kubenswrapper[4730]: I0930 10:41:28.458055 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-zxfgw" Sep 30 10:41:28 crc kubenswrapper[4730]: I0930 10:41:28.458679 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-zxfgw" Sep 30 10:41:28 crc kubenswrapper[4730]: I0930 10:41:28.503416 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-zxfgw" Sep 30 10:41:29 crc kubenswrapper[4730]: I0930 10:41:29.500838 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-zxfgw" Sep 30 10:41:29 crc kubenswrapper[4730]: I0930 10:41:29.562319 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-zxfgw"] Sep 30 10:41:29 crc kubenswrapper[4730]: I0930 10:41:29.611889 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-njvdw"] Sep 30 10:41:29 crc kubenswrapper[4730]: I0930 10:41:29.612216 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-njvdw" podUID="835e01d4-2f60-40dd-8fb7-c4ac9adcc01f" containerName="registry-server" containerID="cri-o://778483313a3d26dd28acea420f08291c067043e405901167d45ec0ca30e33aa3" gracePeriod=2 Sep 30 10:41:30 crc kubenswrapper[4730]: I0930 10:41:30.177502 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-njvdw" Sep 30 10:41:30 crc kubenswrapper[4730]: I0930 10:41:30.236205 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/835e01d4-2f60-40dd-8fb7-c4ac9adcc01f-utilities\") pod \"835e01d4-2f60-40dd-8fb7-c4ac9adcc01f\" (UID: \"835e01d4-2f60-40dd-8fb7-c4ac9adcc01f\") " Sep 30 10:41:30 crc kubenswrapper[4730]: I0930 10:41:30.236248 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/835e01d4-2f60-40dd-8fb7-c4ac9adcc01f-catalog-content\") pod \"835e01d4-2f60-40dd-8fb7-c4ac9adcc01f\" (UID: \"835e01d4-2f60-40dd-8fb7-c4ac9adcc01f\") " Sep 30 10:41:30 crc kubenswrapper[4730]: I0930 10:41:30.236313 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xdtmn\" (UniqueName: \"kubernetes.io/projected/835e01d4-2f60-40dd-8fb7-c4ac9adcc01f-kube-api-access-xdtmn\") pod \"835e01d4-2f60-40dd-8fb7-c4ac9adcc01f\" (UID: \"835e01d4-2f60-40dd-8fb7-c4ac9adcc01f\") " Sep 30 10:41:30 crc kubenswrapper[4730]: I0930 10:41:30.237172 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/835e01d4-2f60-40dd-8fb7-c4ac9adcc01f-utilities" (OuterVolumeSpecName: "utilities") pod "835e01d4-2f60-40dd-8fb7-c4ac9adcc01f" (UID: "835e01d4-2f60-40dd-8fb7-c4ac9adcc01f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 10:41:30 crc kubenswrapper[4730]: I0930 10:41:30.251408 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/835e01d4-2f60-40dd-8fb7-c4ac9adcc01f-kube-api-access-xdtmn" (OuterVolumeSpecName: "kube-api-access-xdtmn") pod "835e01d4-2f60-40dd-8fb7-c4ac9adcc01f" (UID: "835e01d4-2f60-40dd-8fb7-c4ac9adcc01f"). InnerVolumeSpecName "kube-api-access-xdtmn". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:41:30 crc kubenswrapper[4730]: I0930 10:41:30.337152 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/835e01d4-2f60-40dd-8fb7-c4ac9adcc01f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "835e01d4-2f60-40dd-8fb7-c4ac9adcc01f" (UID: "835e01d4-2f60-40dd-8fb7-c4ac9adcc01f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 10:41:30 crc kubenswrapper[4730]: I0930 10:41:30.337690 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/835e01d4-2f60-40dd-8fb7-c4ac9adcc01f-catalog-content\") pod \"835e01d4-2f60-40dd-8fb7-c4ac9adcc01f\" (UID: \"835e01d4-2f60-40dd-8fb7-c4ac9adcc01f\") " Sep 30 10:41:30 crc kubenswrapper[4730]: W0930 10:41:30.337945 4730 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/835e01d4-2f60-40dd-8fb7-c4ac9adcc01f/volumes/kubernetes.io~empty-dir/catalog-content Sep 30 10:41:30 crc kubenswrapper[4730]: I0930 10:41:30.338004 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/835e01d4-2f60-40dd-8fb7-c4ac9adcc01f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "835e01d4-2f60-40dd-8fb7-c4ac9adcc01f" (UID: "835e01d4-2f60-40dd-8fb7-c4ac9adcc01f"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 10:41:30 crc kubenswrapper[4730]: I0930 10:41:30.338256 4730 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/835e01d4-2f60-40dd-8fb7-c4ac9adcc01f-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 10:41:30 crc kubenswrapper[4730]: I0930 10:41:30.338273 4730 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/835e01d4-2f60-40dd-8fb7-c4ac9adcc01f-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 10:41:30 crc kubenswrapper[4730]: I0930 10:41:30.338290 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xdtmn\" (UniqueName: \"kubernetes.io/projected/835e01d4-2f60-40dd-8fb7-c4ac9adcc01f-kube-api-access-xdtmn\") on node \"crc\" DevicePath \"\"" Sep 30 10:41:30 crc kubenswrapper[4730]: I0930 10:41:30.455754 4730 generic.go:334] "Generic (PLEG): container finished" podID="835e01d4-2f60-40dd-8fb7-c4ac9adcc01f" containerID="778483313a3d26dd28acea420f08291c067043e405901167d45ec0ca30e33aa3" exitCode=0 Sep 30 10:41:30 crc kubenswrapper[4730]: I0930 10:41:30.457197 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-njvdw" Sep 30 10:41:30 crc kubenswrapper[4730]: I0930 10:41:30.457763 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-njvdw" event={"ID":"835e01d4-2f60-40dd-8fb7-c4ac9adcc01f","Type":"ContainerDied","Data":"778483313a3d26dd28acea420f08291c067043e405901167d45ec0ca30e33aa3"} Sep 30 10:41:30 crc kubenswrapper[4730]: I0930 10:41:30.457806 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-njvdw" event={"ID":"835e01d4-2f60-40dd-8fb7-c4ac9adcc01f","Type":"ContainerDied","Data":"6281f0cc404cdb8b9b14bbe62b3b92f3b7889d1e6668019cf613575460d9ae04"} Sep 30 10:41:30 crc kubenswrapper[4730]: I0930 10:41:30.457832 4730 scope.go:117] "RemoveContainer" containerID="778483313a3d26dd28acea420f08291c067043e405901167d45ec0ca30e33aa3" Sep 30 10:41:30 crc kubenswrapper[4730]: I0930 10:41:30.484699 4730 scope.go:117] "RemoveContainer" containerID="7423b60c765ea89586fcc941bea89f7ed1f17a74cf3c3c95bba2b4c0068eb0cb" Sep 30 10:41:30 crc kubenswrapper[4730]: I0930 10:41:30.490745 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-njvdw"] Sep 30 10:41:30 crc kubenswrapper[4730]: I0930 10:41:30.501906 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-njvdw"] Sep 30 10:41:30 crc kubenswrapper[4730]: I0930 10:41:30.505991 4730 scope.go:117] "RemoveContainer" containerID="6df706cd8ae6164f348a9a798e17b92240c4f8ea4a04ad92b0e61e7dcc0dc95d" Sep 30 10:41:30 crc kubenswrapper[4730]: I0930 10:41:30.552395 4730 scope.go:117] "RemoveContainer" containerID="778483313a3d26dd28acea420f08291c067043e405901167d45ec0ca30e33aa3" Sep 30 10:41:30 crc kubenswrapper[4730]: E0930 10:41:30.552951 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"778483313a3d26dd28acea420f08291c067043e405901167d45ec0ca30e33aa3\": container with ID starting with 778483313a3d26dd28acea420f08291c067043e405901167d45ec0ca30e33aa3 not found: ID does not exist" containerID="778483313a3d26dd28acea420f08291c067043e405901167d45ec0ca30e33aa3" Sep 30 10:41:30 crc kubenswrapper[4730]: I0930 10:41:30.553014 4730 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"778483313a3d26dd28acea420f08291c067043e405901167d45ec0ca30e33aa3"} err="failed to get container status \"778483313a3d26dd28acea420f08291c067043e405901167d45ec0ca30e33aa3\": rpc error: code = NotFound desc = could not find container \"778483313a3d26dd28acea420f08291c067043e405901167d45ec0ca30e33aa3\": container with ID starting with 778483313a3d26dd28acea420f08291c067043e405901167d45ec0ca30e33aa3 not found: ID does not exist" Sep 30 10:41:30 crc kubenswrapper[4730]: I0930 10:41:30.553052 4730 scope.go:117] "RemoveContainer" containerID="7423b60c765ea89586fcc941bea89f7ed1f17a74cf3c3c95bba2b4c0068eb0cb" Sep 30 10:41:30 crc kubenswrapper[4730]: E0930 10:41:30.554900 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7423b60c765ea89586fcc941bea89f7ed1f17a74cf3c3c95bba2b4c0068eb0cb\": container with ID starting with 7423b60c765ea89586fcc941bea89f7ed1f17a74cf3c3c95bba2b4c0068eb0cb not found: ID does not exist" containerID="7423b60c765ea89586fcc941bea89f7ed1f17a74cf3c3c95bba2b4c0068eb0cb" Sep 30 10:41:30 crc kubenswrapper[4730]: I0930 10:41:30.554962 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7423b60c765ea89586fcc941bea89f7ed1f17a74cf3c3c95bba2b4c0068eb0cb"} err="failed to get container status \"7423b60c765ea89586fcc941bea89f7ed1f17a74cf3c3c95bba2b4c0068eb0cb\": rpc error: code = NotFound desc = could not find container \"7423b60c765ea89586fcc941bea89f7ed1f17a74cf3c3c95bba2b4c0068eb0cb\": container with ID starting with 7423b60c765ea89586fcc941bea89f7ed1f17a74cf3c3c95bba2b4c0068eb0cb not found: ID does not exist" Sep 30 10:41:30 crc kubenswrapper[4730]: I0930 10:41:30.555032 4730 scope.go:117] "RemoveContainer" containerID="6df706cd8ae6164f348a9a798e17b92240c4f8ea4a04ad92b0e61e7dcc0dc95d" Sep 30 10:41:30 crc kubenswrapper[4730]: E0930 10:41:30.555535 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6df706cd8ae6164f348a9a798e17b92240c4f8ea4a04ad92b0e61e7dcc0dc95d\": container with ID starting with 6df706cd8ae6164f348a9a798e17b92240c4f8ea4a04ad92b0e61e7dcc0dc95d not found: ID does not exist" containerID="6df706cd8ae6164f348a9a798e17b92240c4f8ea4a04ad92b0e61e7dcc0dc95d" Sep 30 10:41:30 crc kubenswrapper[4730]: I0930 10:41:30.555572 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6df706cd8ae6164f348a9a798e17b92240c4f8ea4a04ad92b0e61e7dcc0dc95d"} err="failed to get container status \"6df706cd8ae6164f348a9a798e17b92240c4f8ea4a04ad92b0e61e7dcc0dc95d\": rpc error: code = NotFound desc = could not find container \"6df706cd8ae6164f348a9a798e17b92240c4f8ea4a04ad92b0e61e7dcc0dc95d\": container with ID starting with 6df706cd8ae6164f348a9a798e17b92240c4f8ea4a04ad92b0e61e7dcc0dc95d not found: ID does not exist" Sep 30 10:41:32 crc kubenswrapper[4730]: I0930 10:41:32.392360 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="835e01d4-2f60-40dd-8fb7-c4ac9adcc01f" path="/var/lib/kubelet/pods/835e01d4-2f60-40dd-8fb7-c4ac9adcc01f/volumes" Sep 30 10:41:38 crc kubenswrapper[4730]: I0930 10:41:38.380740 4730 scope.go:117] "RemoveContainer" containerID="7e6c79d1416bb74cb6515c7fc45c6c769987357541374ab109fdf2b032a93fd0" Sep 30 10:41:38 crc kubenswrapper[4730]: E0930 10:41:38.381291 4730 pod_workers.go:1301] "Error syncing pod, skipping" 
err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 10:41:51 crc kubenswrapper[4730]: I0930 10:41:51.381056 4730 scope.go:117] "RemoveContainer" containerID="7e6c79d1416bb74cb6515c7fc45c6c769987357541374ab109fdf2b032a93fd0" Sep 30 10:41:51 crc kubenswrapper[4730]: E0930 10:41:51.381921 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 10:42:06 crc kubenswrapper[4730]: I0930 10:42:06.395647 4730 scope.go:117] "RemoveContainer" containerID="7e6c79d1416bb74cb6515c7fc45c6c769987357541374ab109fdf2b032a93fd0" Sep 30 10:42:06 crc kubenswrapper[4730]: E0930 10:42:06.396488 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 10:42:19 crc kubenswrapper[4730]: I0930 10:42:19.381854 4730 scope.go:117] "RemoveContainer" containerID="7e6c79d1416bb74cb6515c7fc45c6c769987357541374ab109fdf2b032a93fd0" Sep 30 10:42:19 crc kubenswrapper[4730]: E0930 10:42:19.383018 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 10:42:32 crc kubenswrapper[4730]: I0930 10:42:32.381407 4730 scope.go:117] "RemoveContainer" containerID="7e6c79d1416bb74cb6515c7fc45c6c769987357541374ab109fdf2b032a93fd0" Sep 30 10:42:33 crc kubenswrapper[4730]: I0930 10:42:33.055490 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" event={"ID":"95bd4436-8399-478d-9552-c9ba5ae8f327","Type":"ContainerStarted","Data":"356c135f70f8fbd71ae905359591919213eb62e111b8492c4fab1464016900ca"} Sep 30 10:42:51 crc kubenswrapper[4730]: I0930 10:42:51.210367 4730 generic.go:334] "Generic (PLEG): container finished" podID="9a9e7f46-a278-48e1-9171-826bbba2fe2b" containerID="ed81eb7f5010d7830940fee737031455a06239699bd05e8ef77803965a9870cb" exitCode=0 Sep 30 10:42:51 crc kubenswrapper[4730]: I0930 10:42:51.210466 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-plcgw" event={"ID":"9a9e7f46-a278-48e1-9171-826bbba2fe2b","Type":"ContainerDied","Data":"ed81eb7f5010d7830940fee737031455a06239699bd05e8ef77803965a9870cb"} Sep 30 10:42:52 crc kubenswrapper[4730]: I0930 
10:42:52.709351 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-plcgw" Sep 30 10:42:52 crc kubenswrapper[4730]: I0930 10:42:52.793737 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/9a9e7f46-a278-48e1-9171-826bbba2fe2b-ceph\") pod \"9a9e7f46-a278-48e1-9171-826bbba2fe2b\" (UID: \"9a9e7f46-a278-48e1-9171-826bbba2fe2b\") " Sep 30 10:42:52 crc kubenswrapper[4730]: I0930 10:42:52.794094 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/9a9e7f46-a278-48e1-9171-826bbba2fe2b-ceilometer-compute-config-data-1\") pod \"9a9e7f46-a278-48e1-9171-826bbba2fe2b\" (UID: \"9a9e7f46-a278-48e1-9171-826bbba2fe2b\") " Sep 30 10:42:52 crc kubenswrapper[4730]: I0930 10:42:52.794118 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9a9e7f46-a278-48e1-9171-826bbba2fe2b-telemetry-combined-ca-bundle\") pod \"9a9e7f46-a278-48e1-9171-826bbba2fe2b\" (UID: \"9a9e7f46-a278-48e1-9171-826bbba2fe2b\") " Sep 30 10:42:52 crc kubenswrapper[4730]: I0930 10:42:52.794145 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/9a9e7f46-a278-48e1-9171-826bbba2fe2b-ceilometer-compute-config-data-0\") pod \"9a9e7f46-a278-48e1-9171-826bbba2fe2b\" (UID: \"9a9e7f46-a278-48e1-9171-826bbba2fe2b\") " Sep 30 10:42:52 crc kubenswrapper[4730]: I0930 10:42:52.794210 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8m28m\" (UniqueName: \"kubernetes.io/projected/9a9e7f46-a278-48e1-9171-826bbba2fe2b-kube-api-access-8m28m\") pod \"9a9e7f46-a278-48e1-9171-826bbba2fe2b\" (UID: \"9a9e7f46-a278-48e1-9171-826bbba2fe2b\") " Sep 30 10:42:52 crc kubenswrapper[4730]: I0930 10:42:52.794260 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9a9e7f46-a278-48e1-9171-826bbba2fe2b-ssh-key\") pod \"9a9e7f46-a278-48e1-9171-826bbba2fe2b\" (UID: \"9a9e7f46-a278-48e1-9171-826bbba2fe2b\") " Sep 30 10:42:52 crc kubenswrapper[4730]: I0930 10:42:52.794292 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/9a9e7f46-a278-48e1-9171-826bbba2fe2b-ceilometer-compute-config-data-2\") pod \"9a9e7f46-a278-48e1-9171-826bbba2fe2b\" (UID: \"9a9e7f46-a278-48e1-9171-826bbba2fe2b\") " Sep 30 10:42:52 crc kubenswrapper[4730]: I0930 10:42:52.794992 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9a9e7f46-a278-48e1-9171-826bbba2fe2b-inventory\") pod \"9a9e7f46-a278-48e1-9171-826bbba2fe2b\" (UID: \"9a9e7f46-a278-48e1-9171-826bbba2fe2b\") " Sep 30 10:42:52 crc kubenswrapper[4730]: I0930 10:42:52.808902 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9a9e7f46-a278-48e1-9171-826bbba2fe2b-ceph" (OuterVolumeSpecName: "ceph") pod "9a9e7f46-a278-48e1-9171-826bbba2fe2b" (UID: "9a9e7f46-a278-48e1-9171-826bbba2fe2b"). InnerVolumeSpecName "ceph". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:42:52 crc kubenswrapper[4730]: I0930 10:42:52.813180 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9a9e7f46-a278-48e1-9171-826bbba2fe2b-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "9a9e7f46-a278-48e1-9171-826bbba2fe2b" (UID: "9a9e7f46-a278-48e1-9171-826bbba2fe2b"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:42:52 crc kubenswrapper[4730]: I0930 10:42:52.814431 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9a9e7f46-a278-48e1-9171-826bbba2fe2b-kube-api-access-8m28m" (OuterVolumeSpecName: "kube-api-access-8m28m") pod "9a9e7f46-a278-48e1-9171-826bbba2fe2b" (UID: "9a9e7f46-a278-48e1-9171-826bbba2fe2b"). InnerVolumeSpecName "kube-api-access-8m28m". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:42:52 crc kubenswrapper[4730]: I0930 10:42:52.824084 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9a9e7f46-a278-48e1-9171-826bbba2fe2b-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "9a9e7f46-a278-48e1-9171-826bbba2fe2b" (UID: "9a9e7f46-a278-48e1-9171-826bbba2fe2b"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:42:52 crc kubenswrapper[4730]: I0930 10:42:52.824567 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9a9e7f46-a278-48e1-9171-826bbba2fe2b-ceilometer-compute-config-data-1" (OuterVolumeSpecName: "ceilometer-compute-config-data-1") pod "9a9e7f46-a278-48e1-9171-826bbba2fe2b" (UID: "9a9e7f46-a278-48e1-9171-826bbba2fe2b"). InnerVolumeSpecName "ceilometer-compute-config-data-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:42:52 crc kubenswrapper[4730]: I0930 10:42:52.825843 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9a9e7f46-a278-48e1-9171-826bbba2fe2b-ceilometer-compute-config-data-0" (OuterVolumeSpecName: "ceilometer-compute-config-data-0") pod "9a9e7f46-a278-48e1-9171-826bbba2fe2b" (UID: "9a9e7f46-a278-48e1-9171-826bbba2fe2b"). InnerVolumeSpecName "ceilometer-compute-config-data-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:42:52 crc kubenswrapper[4730]: I0930 10:42:52.836190 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9a9e7f46-a278-48e1-9171-826bbba2fe2b-inventory" (OuterVolumeSpecName: "inventory") pod "9a9e7f46-a278-48e1-9171-826bbba2fe2b" (UID: "9a9e7f46-a278-48e1-9171-826bbba2fe2b"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:42:52 crc kubenswrapper[4730]: I0930 10:42:52.842235 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9a9e7f46-a278-48e1-9171-826bbba2fe2b-ceilometer-compute-config-data-2" (OuterVolumeSpecName: "ceilometer-compute-config-data-2") pod "9a9e7f46-a278-48e1-9171-826bbba2fe2b" (UID: "9a9e7f46-a278-48e1-9171-826bbba2fe2b"). InnerVolumeSpecName "ceilometer-compute-config-data-2". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:42:52 crc kubenswrapper[4730]: I0930 10:42:52.897227 4730 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9a9e7f46-a278-48e1-9171-826bbba2fe2b-inventory\") on node \"crc\" DevicePath \"\"" Sep 30 10:42:52 crc kubenswrapper[4730]: I0930 10:42:52.897261 4730 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/9a9e7f46-a278-48e1-9171-826bbba2fe2b-ceph\") on node \"crc\" DevicePath \"\"" Sep 30 10:42:52 crc kubenswrapper[4730]: I0930 10:42:52.897271 4730 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9a9e7f46-a278-48e1-9171-826bbba2fe2b-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 10:42:52 crc kubenswrapper[4730]: I0930 10:42:52.897283 4730 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/9a9e7f46-a278-48e1-9171-826bbba2fe2b-ceilometer-compute-config-data-1\") on node \"crc\" DevicePath \"\"" Sep 30 10:42:52 crc kubenswrapper[4730]: I0930 10:42:52.897294 4730 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/9a9e7f46-a278-48e1-9171-826bbba2fe2b-ceilometer-compute-config-data-0\") on node \"crc\" DevicePath \"\"" Sep 30 10:42:52 crc kubenswrapper[4730]: I0930 10:42:52.897303 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8m28m\" (UniqueName: \"kubernetes.io/projected/9a9e7f46-a278-48e1-9171-826bbba2fe2b-kube-api-access-8m28m\") on node \"crc\" DevicePath \"\"" Sep 30 10:42:52 crc kubenswrapper[4730]: I0930 10:42:52.897313 4730 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9a9e7f46-a278-48e1-9171-826bbba2fe2b-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 10:42:52 crc kubenswrapper[4730]: I0930 10:42:52.897323 4730 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/9a9e7f46-a278-48e1-9171-826bbba2fe2b-ceilometer-compute-config-data-2\") on node \"crc\" DevicePath \"\"" Sep 30 10:42:53 crc kubenswrapper[4730]: I0930 10:42:53.241376 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-plcgw" event={"ID":"9a9e7f46-a278-48e1-9171-826bbba2fe2b","Type":"ContainerDied","Data":"4e0d4edaafbb9f4cc69965d2b8262090d9907fd4205f5bce00baa435f6e71048"} Sep 30 10:42:53 crc kubenswrapper[4730]: I0930 10:42:53.241422 4730 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4e0d4edaafbb9f4cc69965d2b8262090d9907fd4205f5bce00baa435f6e71048" Sep 30 10:42:53 crc kubenswrapper[4730]: I0930 10:42:53.241474 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-plcgw" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.384798 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-backup-0"] Sep 30 10:43:11 crc kubenswrapper[4730]: E0930 10:43:11.385732 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="835e01d4-2f60-40dd-8fb7-c4ac9adcc01f" containerName="extract-content" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.385746 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="835e01d4-2f60-40dd-8fb7-c4ac9adcc01f" containerName="extract-content" Sep 30 10:43:11 crc kubenswrapper[4730]: E0930 10:43:11.385759 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="835e01d4-2f60-40dd-8fb7-c4ac9adcc01f" containerName="registry-server" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.385765 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="835e01d4-2f60-40dd-8fb7-c4ac9adcc01f" containerName="registry-server" Sep 30 10:43:11 crc kubenswrapper[4730]: E0930 10:43:11.385774 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9a9e7f46-a278-48e1-9171-826bbba2fe2b" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.385781 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="9a9e7f46-a278-48e1-9171-826bbba2fe2b" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Sep 30 10:43:11 crc kubenswrapper[4730]: E0930 10:43:11.385797 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="835e01d4-2f60-40dd-8fb7-c4ac9adcc01f" containerName="extract-utilities" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.385803 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="835e01d4-2f60-40dd-8fb7-c4ac9adcc01f" containerName="extract-utilities" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.386012 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="835e01d4-2f60-40dd-8fb7-c4ac9adcc01f" containerName="registry-server" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.386028 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="9a9e7f46-a278-48e1-9171-826bbba2fe2b" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.387183 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-backup-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.389588 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.389960 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-backup-config-data" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.402069 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-backup-0"] Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.419152 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-volume-volume1-0"] Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.421722 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-volume-volume1-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.425553 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-volume-volume1-config-data" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.427818 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-volume-volume1-0"] Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.447063 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/0983f2ba-1e23-492a-abb5-5cc4f4199925-etc-nvme\") pod \"cinder-backup-0\" (UID: \"0983f2ba-1e23-492a-abb5-5cc4f4199925\") " pod="openstack/cinder-backup-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.447131 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0983f2ba-1e23-492a-abb5-5cc4f4199925-scripts\") pod \"cinder-backup-0\" (UID: \"0983f2ba-1e23-492a-abb5-5cc4f4199925\") " pod="openstack/cinder-backup-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.447160 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0983f2ba-1e23-492a-abb5-5cc4f4199925-config-data-custom\") pod \"cinder-backup-0\" (UID: \"0983f2ba-1e23-492a-abb5-5cc4f4199925\") " pod="openstack/cinder-backup-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.447237 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/0983f2ba-1e23-492a-abb5-5cc4f4199925-run\") pod \"cinder-backup-0\" (UID: \"0983f2ba-1e23-492a-abb5-5cc4f4199925\") " pod="openstack/cinder-backup-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.447276 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/0983f2ba-1e23-492a-abb5-5cc4f4199925-var-locks-brick\") pod \"cinder-backup-0\" (UID: \"0983f2ba-1e23-492a-abb5-5cc4f4199925\") " pod="openstack/cinder-backup-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.447298 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0983f2ba-1e23-492a-abb5-5cc4f4199925-combined-ca-bundle\") pod \"cinder-backup-0\" (UID: \"0983f2ba-1e23-492a-abb5-5cc4f4199925\") " pod="openstack/cinder-backup-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.447363 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0983f2ba-1e23-492a-abb5-5cc4f4199925-config-data\") pod \"cinder-backup-0\" (UID: \"0983f2ba-1e23-492a-abb5-5cc4f4199925\") " pod="openstack/cinder-backup-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.447389 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/0983f2ba-1e23-492a-abb5-5cc4f4199925-etc-iscsi\") pod \"cinder-backup-0\" (UID: \"0983f2ba-1e23-492a-abb5-5cc4f4199925\") " pod="openstack/cinder-backup-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.448086 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started 
for volume \"kube-api-access-hj58h\" (UniqueName: \"kubernetes.io/projected/0983f2ba-1e23-492a-abb5-5cc4f4199925-kube-api-access-hj58h\") pod \"cinder-backup-0\" (UID: \"0983f2ba-1e23-492a-abb5-5cc4f4199925\") " pod="openstack/cinder-backup-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.448234 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/0983f2ba-1e23-492a-abb5-5cc4f4199925-ceph\") pod \"cinder-backup-0\" (UID: \"0983f2ba-1e23-492a-abb5-5cc4f4199925\") " pod="openstack/cinder-backup-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.448282 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/0983f2ba-1e23-492a-abb5-5cc4f4199925-dev\") pod \"cinder-backup-0\" (UID: \"0983f2ba-1e23-492a-abb5-5cc4f4199925\") " pod="openstack/cinder-backup-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.448372 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/0983f2ba-1e23-492a-abb5-5cc4f4199925-var-lib-cinder\") pod \"cinder-backup-0\" (UID: \"0983f2ba-1e23-492a-abb5-5cc4f4199925\") " pod="openstack/cinder-backup-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.448393 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/0983f2ba-1e23-492a-abb5-5cc4f4199925-lib-modules\") pod \"cinder-backup-0\" (UID: \"0983f2ba-1e23-492a-abb5-5cc4f4199925\") " pod="openstack/cinder-backup-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.448433 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/0983f2ba-1e23-492a-abb5-5cc4f4199925-var-locks-cinder\") pod \"cinder-backup-0\" (UID: \"0983f2ba-1e23-492a-abb5-5cc4f4199925\") " pod="openstack/cinder-backup-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.448453 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/0983f2ba-1e23-492a-abb5-5cc4f4199925-sys\") pod \"cinder-backup-0\" (UID: \"0983f2ba-1e23-492a-abb5-5cc4f4199925\") " pod="openstack/cinder-backup-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.448478 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0983f2ba-1e23-492a-abb5-5cc4f4199925-etc-machine-id\") pod \"cinder-backup-0\" (UID: \"0983f2ba-1e23-492a-abb5-5cc4f4199925\") " pod="openstack/cinder-backup-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.549900 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/0983f2ba-1e23-492a-abb5-5cc4f4199925-etc-nvme\") pod \"cinder-backup-0\" (UID: \"0983f2ba-1e23-492a-abb5-5cc4f4199925\") " pod="openstack/cinder-backup-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.549960 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/ad078102-347b-4f85-8fa5-f83cbf35c06a-lib-modules\") pod \"cinder-volume-volume1-0\" (UID: \"ad078102-347b-4f85-8fa5-f83cbf35c06a\") " 
pod="openstack/cinder-volume-volume1-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.549996 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/ad078102-347b-4f85-8fa5-f83cbf35c06a-ceph\") pod \"cinder-volume-volume1-0\" (UID: \"ad078102-347b-4f85-8fa5-f83cbf35c06a\") " pod="openstack/cinder-volume-volume1-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.550013 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0983f2ba-1e23-492a-abb5-5cc4f4199925-scripts\") pod \"cinder-backup-0\" (UID: \"0983f2ba-1e23-492a-abb5-5cc4f4199925\") " pod="openstack/cinder-backup-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.550031 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ad078102-347b-4f85-8fa5-f83cbf35c06a-etc-machine-id\") pod \"cinder-volume-volume1-0\" (UID: \"ad078102-347b-4f85-8fa5-f83cbf35c06a\") " pod="openstack/cinder-volume-volume1-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.550067 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0983f2ba-1e23-492a-abb5-5cc4f4199925-config-data-custom\") pod \"cinder-backup-0\" (UID: \"0983f2ba-1e23-492a-abb5-5cc4f4199925\") " pod="openstack/cinder-backup-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.550055 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/0983f2ba-1e23-492a-abb5-5cc4f4199925-etc-nvme\") pod \"cinder-backup-0\" (UID: \"0983f2ba-1e23-492a-abb5-5cc4f4199925\") " pod="openstack/cinder-backup-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.550089 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ad078102-347b-4f85-8fa5-f83cbf35c06a-scripts\") pod \"cinder-volume-volume1-0\" (UID: \"ad078102-347b-4f85-8fa5-f83cbf35c06a\") " pod="openstack/cinder-volume-volume1-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.550167 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/ad078102-347b-4f85-8fa5-f83cbf35c06a-run\") pod \"cinder-volume-volume1-0\" (UID: \"ad078102-347b-4f85-8fa5-f83cbf35c06a\") " pod="openstack/cinder-volume-volume1-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.550202 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/ad078102-347b-4f85-8fa5-f83cbf35c06a-var-locks-brick\") pod \"cinder-volume-volume1-0\" (UID: \"ad078102-347b-4f85-8fa5-f83cbf35c06a\") " pod="openstack/cinder-volume-volume1-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.550222 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/0983f2ba-1e23-492a-abb5-5cc4f4199925-run\") pod \"cinder-backup-0\" (UID: \"0983f2ba-1e23-492a-abb5-5cc4f4199925\") " pod="openstack/cinder-backup-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.550242 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sys\" (UniqueName: 
\"kubernetes.io/host-path/ad078102-347b-4f85-8fa5-f83cbf35c06a-sys\") pod \"cinder-volume-volume1-0\" (UID: \"ad078102-347b-4f85-8fa5-f83cbf35c06a\") " pod="openstack/cinder-volume-volume1-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.550260 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/ad078102-347b-4f85-8fa5-f83cbf35c06a-dev\") pod \"cinder-volume-volume1-0\" (UID: \"ad078102-347b-4f85-8fa5-f83cbf35c06a\") " pod="openstack/cinder-volume-volume1-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.550283 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/0983f2ba-1e23-492a-abb5-5cc4f4199925-var-locks-brick\") pod \"cinder-backup-0\" (UID: \"0983f2ba-1e23-492a-abb5-5cc4f4199925\") " pod="openstack/cinder-backup-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.550299 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0983f2ba-1e23-492a-abb5-5cc4f4199925-combined-ca-bundle\") pod \"cinder-backup-0\" (UID: \"0983f2ba-1e23-492a-abb5-5cc4f4199925\") " pod="openstack/cinder-backup-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.550328 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ad078102-347b-4f85-8fa5-f83cbf35c06a-combined-ca-bundle\") pod \"cinder-volume-volume1-0\" (UID: \"ad078102-347b-4f85-8fa5-f83cbf35c06a\") " pod="openstack/cinder-volume-volume1-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.550348 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0983f2ba-1e23-492a-abb5-5cc4f4199925-config-data\") pod \"cinder-backup-0\" (UID: \"0983f2ba-1e23-492a-abb5-5cc4f4199925\") " pod="openstack/cinder-backup-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.550366 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ad078102-347b-4f85-8fa5-f83cbf35c06a-config-data-custom\") pod \"cinder-volume-volume1-0\" (UID: \"ad078102-347b-4f85-8fa5-f83cbf35c06a\") " pod="openstack/cinder-volume-volume1-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.550386 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/0983f2ba-1e23-492a-abb5-5cc4f4199925-etc-iscsi\") pod \"cinder-backup-0\" (UID: \"0983f2ba-1e23-492a-abb5-5cc4f4199925\") " pod="openstack/cinder-backup-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.550402 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/ad078102-347b-4f85-8fa5-f83cbf35c06a-var-locks-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"ad078102-347b-4f85-8fa5-f83cbf35c06a\") " pod="openstack/cinder-volume-volume1-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.550420 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hj58h\" (UniqueName: \"kubernetes.io/projected/0983f2ba-1e23-492a-abb5-5cc4f4199925-kube-api-access-hj58h\") pod \"cinder-backup-0\" (UID: 
\"0983f2ba-1e23-492a-abb5-5cc4f4199925\") " pod="openstack/cinder-backup-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.550475 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/ad078102-347b-4f85-8fa5-f83cbf35c06a-etc-nvme\") pod \"cinder-volume-volume1-0\" (UID: \"ad078102-347b-4f85-8fa5-f83cbf35c06a\") " pod="openstack/cinder-volume-volume1-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.550493 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/0983f2ba-1e23-492a-abb5-5cc4f4199925-ceph\") pod \"cinder-backup-0\" (UID: \"0983f2ba-1e23-492a-abb5-5cc4f4199925\") " pod="openstack/cinder-backup-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.550514 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/0983f2ba-1e23-492a-abb5-5cc4f4199925-dev\") pod \"cinder-backup-0\" (UID: \"0983f2ba-1e23-492a-abb5-5cc4f4199925\") " pod="openstack/cinder-backup-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.550531 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/ad078102-347b-4f85-8fa5-f83cbf35c06a-etc-iscsi\") pod \"cinder-volume-volume1-0\" (UID: \"ad078102-347b-4f85-8fa5-f83cbf35c06a\") " pod="openstack/cinder-volume-volume1-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.550561 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/ad078102-347b-4f85-8fa5-f83cbf35c06a-var-lib-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"ad078102-347b-4f85-8fa5-f83cbf35c06a\") " pod="openstack/cinder-volume-volume1-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.550576 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6g845\" (UniqueName: \"kubernetes.io/projected/ad078102-347b-4f85-8fa5-f83cbf35c06a-kube-api-access-6g845\") pod \"cinder-volume-volume1-0\" (UID: \"ad078102-347b-4f85-8fa5-f83cbf35c06a\") " pod="openstack/cinder-volume-volume1-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.550592 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/0983f2ba-1e23-492a-abb5-5cc4f4199925-var-lib-cinder\") pod \"cinder-backup-0\" (UID: \"0983f2ba-1e23-492a-abb5-5cc4f4199925\") " pod="openstack/cinder-backup-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.550630 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/0983f2ba-1e23-492a-abb5-5cc4f4199925-lib-modules\") pod \"cinder-backup-0\" (UID: \"0983f2ba-1e23-492a-abb5-5cc4f4199925\") " pod="openstack/cinder-backup-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.550655 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/0983f2ba-1e23-492a-abb5-5cc4f4199925-var-locks-cinder\") pod \"cinder-backup-0\" (UID: \"0983f2ba-1e23-492a-abb5-5cc4f4199925\") " pod="openstack/cinder-backup-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.550672 4730 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/0983f2ba-1e23-492a-abb5-5cc4f4199925-sys\") pod \"cinder-backup-0\" (UID: \"0983f2ba-1e23-492a-abb5-5cc4f4199925\") " pod="openstack/cinder-backup-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.550691 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0983f2ba-1e23-492a-abb5-5cc4f4199925-etc-machine-id\") pod \"cinder-backup-0\" (UID: \"0983f2ba-1e23-492a-abb5-5cc4f4199925\") " pod="openstack/cinder-backup-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.550709 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ad078102-347b-4f85-8fa5-f83cbf35c06a-config-data\") pod \"cinder-volume-volume1-0\" (UID: \"ad078102-347b-4f85-8fa5-f83cbf35c06a\") " pod="openstack/cinder-volume-volume1-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.550786 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run\" (UniqueName: \"kubernetes.io/host-path/0983f2ba-1e23-492a-abb5-5cc4f4199925-run\") pod \"cinder-backup-0\" (UID: \"0983f2ba-1e23-492a-abb5-5cc4f4199925\") " pod="openstack/cinder-backup-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.550994 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/0983f2ba-1e23-492a-abb5-5cc4f4199925-var-locks-brick\") pod \"cinder-backup-0\" (UID: \"0983f2ba-1e23-492a-abb5-5cc4f4199925\") " pod="openstack/cinder-backup-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.551021 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/0983f2ba-1e23-492a-abb5-5cc4f4199925-dev\") pod \"cinder-backup-0\" (UID: \"0983f2ba-1e23-492a-abb5-5cc4f4199925\") " pod="openstack/cinder-backup-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.551856 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/0983f2ba-1e23-492a-abb5-5cc4f4199925-var-lib-cinder\") pod \"cinder-backup-0\" (UID: \"0983f2ba-1e23-492a-abb5-5cc4f4199925\") " pod="openstack/cinder-backup-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.551916 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/0983f2ba-1e23-492a-abb5-5cc4f4199925-etc-iscsi\") pod \"cinder-backup-0\" (UID: \"0983f2ba-1e23-492a-abb5-5cc4f4199925\") " pod="openstack/cinder-backup-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.552286 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/0983f2ba-1e23-492a-abb5-5cc4f4199925-lib-modules\") pod \"cinder-backup-0\" (UID: \"0983f2ba-1e23-492a-abb5-5cc4f4199925\") " pod="openstack/cinder-backup-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.552328 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/0983f2ba-1e23-492a-abb5-5cc4f4199925-sys\") pod \"cinder-backup-0\" (UID: \"0983f2ba-1e23-492a-abb5-5cc4f4199925\") " pod="openstack/cinder-backup-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.552343 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/0983f2ba-1e23-492a-abb5-5cc4f4199925-var-locks-cinder\") pod \"cinder-backup-0\" (UID: \"0983f2ba-1e23-492a-abb5-5cc4f4199925\") " pod="openstack/cinder-backup-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.552381 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0983f2ba-1e23-492a-abb5-5cc4f4199925-etc-machine-id\") pod \"cinder-backup-0\" (UID: \"0983f2ba-1e23-492a-abb5-5cc4f4199925\") " pod="openstack/cinder-backup-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.556359 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0983f2ba-1e23-492a-abb5-5cc4f4199925-config-data-custom\") pod \"cinder-backup-0\" (UID: \"0983f2ba-1e23-492a-abb5-5cc4f4199925\") " pod="openstack/cinder-backup-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.556531 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0983f2ba-1e23-492a-abb5-5cc4f4199925-combined-ca-bundle\") pod \"cinder-backup-0\" (UID: \"0983f2ba-1e23-492a-abb5-5cc4f4199925\") " pod="openstack/cinder-backup-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.557194 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0983f2ba-1e23-492a-abb5-5cc4f4199925-config-data\") pod \"cinder-backup-0\" (UID: \"0983f2ba-1e23-492a-abb5-5cc4f4199925\") " pod="openstack/cinder-backup-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.557302 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/0983f2ba-1e23-492a-abb5-5cc4f4199925-ceph\") pod \"cinder-backup-0\" (UID: \"0983f2ba-1e23-492a-abb5-5cc4f4199925\") " pod="openstack/cinder-backup-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.557763 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0983f2ba-1e23-492a-abb5-5cc4f4199925-scripts\") pod \"cinder-backup-0\" (UID: \"0983f2ba-1e23-492a-abb5-5cc4f4199925\") " pod="openstack/cinder-backup-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.580018 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hj58h\" (UniqueName: \"kubernetes.io/projected/0983f2ba-1e23-492a-abb5-5cc4f4199925-kube-api-access-hj58h\") pod \"cinder-backup-0\" (UID: \"0983f2ba-1e23-492a-abb5-5cc4f4199925\") " pod="openstack/cinder-backup-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.652243 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/ad078102-347b-4f85-8fa5-f83cbf35c06a-etc-nvme\") pod \"cinder-volume-volume1-0\" (UID: \"ad078102-347b-4f85-8fa5-f83cbf35c06a\") " pod="openstack/cinder-volume-volume1-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.652311 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/ad078102-347b-4f85-8fa5-f83cbf35c06a-etc-iscsi\") pod \"cinder-volume-volume1-0\" (UID: \"ad078102-347b-4f85-8fa5-f83cbf35c06a\") " pod="openstack/cinder-volume-volume1-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.652347 4730 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/ad078102-347b-4f85-8fa5-f83cbf35c06a-var-lib-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"ad078102-347b-4f85-8fa5-f83cbf35c06a\") " pod="openstack/cinder-volume-volume1-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.652362 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6g845\" (UniqueName: \"kubernetes.io/projected/ad078102-347b-4f85-8fa5-f83cbf35c06a-kube-api-access-6g845\") pod \"cinder-volume-volume1-0\" (UID: \"ad078102-347b-4f85-8fa5-f83cbf35c06a\") " pod="openstack/cinder-volume-volume1-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.652395 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ad078102-347b-4f85-8fa5-f83cbf35c06a-config-data\") pod \"cinder-volume-volume1-0\" (UID: \"ad078102-347b-4f85-8fa5-f83cbf35c06a\") " pod="openstack/cinder-volume-volume1-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.652403 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/ad078102-347b-4f85-8fa5-f83cbf35c06a-etc-nvme\") pod \"cinder-volume-volume1-0\" (UID: \"ad078102-347b-4f85-8fa5-f83cbf35c06a\") " pod="openstack/cinder-volume-volume1-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.652486 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/ad078102-347b-4f85-8fa5-f83cbf35c06a-etc-iscsi\") pod \"cinder-volume-volume1-0\" (UID: \"ad078102-347b-4f85-8fa5-f83cbf35c06a\") " pod="openstack/cinder-volume-volume1-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.652429 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/ad078102-347b-4f85-8fa5-f83cbf35c06a-lib-modules\") pod \"cinder-volume-volume1-0\" (UID: \"ad078102-347b-4f85-8fa5-f83cbf35c06a\") " pod="openstack/cinder-volume-volume1-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.652489 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/ad078102-347b-4f85-8fa5-f83cbf35c06a-lib-modules\") pod \"cinder-volume-volume1-0\" (UID: \"ad078102-347b-4f85-8fa5-f83cbf35c06a\") " pod="openstack/cinder-volume-volume1-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.652535 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/ad078102-347b-4f85-8fa5-f83cbf35c06a-var-lib-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"ad078102-347b-4f85-8fa5-f83cbf35c06a\") " pod="openstack/cinder-volume-volume1-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.652649 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/ad078102-347b-4f85-8fa5-f83cbf35c06a-ceph\") pod \"cinder-volume-volume1-0\" (UID: \"ad078102-347b-4f85-8fa5-f83cbf35c06a\") " pod="openstack/cinder-volume-volume1-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.652694 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ad078102-347b-4f85-8fa5-f83cbf35c06a-etc-machine-id\") pod \"cinder-volume-volume1-0\" (UID: \"ad078102-347b-4f85-8fa5-f83cbf35c06a\") " 
pod="openstack/cinder-volume-volume1-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.652779 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ad078102-347b-4f85-8fa5-f83cbf35c06a-scripts\") pod \"cinder-volume-volume1-0\" (UID: \"ad078102-347b-4f85-8fa5-f83cbf35c06a\") " pod="openstack/cinder-volume-volume1-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.652830 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/ad078102-347b-4f85-8fa5-f83cbf35c06a-run\") pod \"cinder-volume-volume1-0\" (UID: \"ad078102-347b-4f85-8fa5-f83cbf35c06a\") " pod="openstack/cinder-volume-volume1-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.652899 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/ad078102-347b-4f85-8fa5-f83cbf35c06a-var-locks-brick\") pod \"cinder-volume-volume1-0\" (UID: \"ad078102-347b-4f85-8fa5-f83cbf35c06a\") " pod="openstack/cinder-volume-volume1-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.652960 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/ad078102-347b-4f85-8fa5-f83cbf35c06a-sys\") pod \"cinder-volume-volume1-0\" (UID: \"ad078102-347b-4f85-8fa5-f83cbf35c06a\") " pod="openstack/cinder-volume-volume1-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.652995 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/ad078102-347b-4f85-8fa5-f83cbf35c06a-dev\") pod \"cinder-volume-volume1-0\" (UID: \"ad078102-347b-4f85-8fa5-f83cbf35c06a\") " pod="openstack/cinder-volume-volume1-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.653087 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run\" (UniqueName: \"kubernetes.io/host-path/ad078102-347b-4f85-8fa5-f83cbf35c06a-run\") pod \"cinder-volume-volume1-0\" (UID: \"ad078102-347b-4f85-8fa5-f83cbf35c06a\") " pod="openstack/cinder-volume-volume1-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.653136 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ad078102-347b-4f85-8fa5-f83cbf35c06a-etc-machine-id\") pod \"cinder-volume-volume1-0\" (UID: \"ad078102-347b-4f85-8fa5-f83cbf35c06a\") " pod="openstack/cinder-volume-volume1-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.653161 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ad078102-347b-4f85-8fa5-f83cbf35c06a-combined-ca-bundle\") pod \"cinder-volume-volume1-0\" (UID: \"ad078102-347b-4f85-8fa5-f83cbf35c06a\") " pod="openstack/cinder-volume-volume1-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.653195 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/ad078102-347b-4f85-8fa5-f83cbf35c06a-sys\") pod \"cinder-volume-volume1-0\" (UID: \"ad078102-347b-4f85-8fa5-f83cbf35c06a\") " pod="openstack/cinder-volume-volume1-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.653223 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/ad078102-347b-4f85-8fa5-f83cbf35c06a-dev\") pod 
\"cinder-volume-volume1-0\" (UID: \"ad078102-347b-4f85-8fa5-f83cbf35c06a\") " pod="openstack/cinder-volume-volume1-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.653195 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/ad078102-347b-4f85-8fa5-f83cbf35c06a-var-locks-brick\") pod \"cinder-volume-volume1-0\" (UID: \"ad078102-347b-4f85-8fa5-f83cbf35c06a\") " pod="openstack/cinder-volume-volume1-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.653231 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ad078102-347b-4f85-8fa5-f83cbf35c06a-config-data-custom\") pod \"cinder-volume-volume1-0\" (UID: \"ad078102-347b-4f85-8fa5-f83cbf35c06a\") " pod="openstack/cinder-volume-volume1-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.653300 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/ad078102-347b-4f85-8fa5-f83cbf35c06a-var-locks-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"ad078102-347b-4f85-8fa5-f83cbf35c06a\") " pod="openstack/cinder-volume-volume1-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.653573 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/ad078102-347b-4f85-8fa5-f83cbf35c06a-var-locks-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"ad078102-347b-4f85-8fa5-f83cbf35c06a\") " pod="openstack/cinder-volume-volume1-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.657255 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ad078102-347b-4f85-8fa5-f83cbf35c06a-scripts\") pod \"cinder-volume-volume1-0\" (UID: \"ad078102-347b-4f85-8fa5-f83cbf35c06a\") " pod="openstack/cinder-volume-volume1-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.657942 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/ad078102-347b-4f85-8fa5-f83cbf35c06a-ceph\") pod \"cinder-volume-volume1-0\" (UID: \"ad078102-347b-4f85-8fa5-f83cbf35c06a\") " pod="openstack/cinder-volume-volume1-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.658438 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ad078102-347b-4f85-8fa5-f83cbf35c06a-config-data\") pod \"cinder-volume-volume1-0\" (UID: \"ad078102-347b-4f85-8fa5-f83cbf35c06a\") " pod="openstack/cinder-volume-volume1-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.660121 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ad078102-347b-4f85-8fa5-f83cbf35c06a-combined-ca-bundle\") pod \"cinder-volume-volume1-0\" (UID: \"ad078102-347b-4f85-8fa5-f83cbf35c06a\") " pod="openstack/cinder-volume-volume1-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.669021 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ad078102-347b-4f85-8fa5-f83cbf35c06a-config-data-custom\") pod \"cinder-volume-volume1-0\" (UID: \"ad078102-347b-4f85-8fa5-f83cbf35c06a\") " pod="openstack/cinder-volume-volume1-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.677722 4730 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-6g845\" (UniqueName: \"kubernetes.io/projected/ad078102-347b-4f85-8fa5-f83cbf35c06a-kube-api-access-6g845\") pod \"cinder-volume-volume1-0\" (UID: \"ad078102-347b-4f85-8fa5-f83cbf35c06a\") " pod="openstack/cinder-volume-volume1-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.704502 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-volume-volume2-0"] Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.706481 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-volume-volume2-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.707985 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-volume-volume2-config-data" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.718701 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-volume-volume2-0"] Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.733310 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-backup-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.749404 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-volume-volume1-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.755518 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/12c02e5f-fb4a-46e7-8772-07bbe148bdcd-ceph\") pod \"cinder-volume-volume2-0\" (UID: \"12c02e5f-fb4a-46e7-8772-07bbe148bdcd\") " pod="openstack/cinder-volume-volume2-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.755577 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/12c02e5f-fb4a-46e7-8772-07bbe148bdcd-etc-iscsi\") pod \"cinder-volume-volume2-0\" (UID: \"12c02e5f-fb4a-46e7-8772-07bbe148bdcd\") " pod="openstack/cinder-volume-volume2-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.755632 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/12c02e5f-fb4a-46e7-8772-07bbe148bdcd-scripts\") pod \"cinder-volume-volume2-0\" (UID: \"12c02e5f-fb4a-46e7-8772-07bbe148bdcd\") " pod="openstack/cinder-volume-volume2-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.755652 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/12c02e5f-fb4a-46e7-8772-07bbe148bdcd-etc-nvme\") pod \"cinder-volume-volume2-0\" (UID: \"12c02e5f-fb4a-46e7-8772-07bbe148bdcd\") " pod="openstack/cinder-volume-volume2-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.755692 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/12c02e5f-fb4a-46e7-8772-07bbe148bdcd-combined-ca-bundle\") pod \"cinder-volume-volume2-0\" (UID: \"12c02e5f-fb4a-46e7-8772-07bbe148bdcd\") " pod="openstack/cinder-volume-volume2-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.755762 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/12c02e5f-fb4a-46e7-8772-07bbe148bdcd-var-locks-cinder\") pod 
\"cinder-volume-volume2-0\" (UID: \"12c02e5f-fb4a-46e7-8772-07bbe148bdcd\") " pod="openstack/cinder-volume-volume2-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.755784 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/12c02e5f-fb4a-46e7-8772-07bbe148bdcd-run\") pod \"cinder-volume-volume2-0\" (UID: \"12c02e5f-fb4a-46e7-8772-07bbe148bdcd\") " pod="openstack/cinder-volume-volume2-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.755879 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p4l9j\" (UniqueName: \"kubernetes.io/projected/12c02e5f-fb4a-46e7-8772-07bbe148bdcd-kube-api-access-p4l9j\") pod \"cinder-volume-volume2-0\" (UID: \"12c02e5f-fb4a-46e7-8772-07bbe148bdcd\") " pod="openstack/cinder-volume-volume2-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.755938 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/12c02e5f-fb4a-46e7-8772-07bbe148bdcd-config-data\") pod \"cinder-volume-volume2-0\" (UID: \"12c02e5f-fb4a-46e7-8772-07bbe148bdcd\") " pod="openstack/cinder-volume-volume2-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.755976 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/12c02e5f-fb4a-46e7-8772-07bbe148bdcd-dev\") pod \"cinder-volume-volume2-0\" (UID: \"12c02e5f-fb4a-46e7-8772-07bbe148bdcd\") " pod="openstack/cinder-volume-volume2-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.756034 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/12c02e5f-fb4a-46e7-8772-07bbe148bdcd-var-locks-brick\") pod \"cinder-volume-volume2-0\" (UID: \"12c02e5f-fb4a-46e7-8772-07bbe148bdcd\") " pod="openstack/cinder-volume-volume2-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.756078 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/12c02e5f-fb4a-46e7-8772-07bbe148bdcd-config-data-custom\") pod \"cinder-volume-volume2-0\" (UID: \"12c02e5f-fb4a-46e7-8772-07bbe148bdcd\") " pod="openstack/cinder-volume-volume2-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.756095 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/12c02e5f-fb4a-46e7-8772-07bbe148bdcd-sys\") pod \"cinder-volume-volume2-0\" (UID: \"12c02e5f-fb4a-46e7-8772-07bbe148bdcd\") " pod="openstack/cinder-volume-volume2-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.756121 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/12c02e5f-fb4a-46e7-8772-07bbe148bdcd-etc-machine-id\") pod \"cinder-volume-volume2-0\" (UID: \"12c02e5f-fb4a-46e7-8772-07bbe148bdcd\") " pod="openstack/cinder-volume-volume2-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.756143 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/12c02e5f-fb4a-46e7-8772-07bbe148bdcd-var-lib-cinder\") pod 
\"cinder-volume-volume2-0\" (UID: \"12c02e5f-fb4a-46e7-8772-07bbe148bdcd\") " pod="openstack/cinder-volume-volume2-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.756210 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/12c02e5f-fb4a-46e7-8772-07bbe148bdcd-lib-modules\") pod \"cinder-volume-volume2-0\" (UID: \"12c02e5f-fb4a-46e7-8772-07bbe148bdcd\") " pod="openstack/cinder-volume-volume2-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.858220 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/12c02e5f-fb4a-46e7-8772-07bbe148bdcd-var-locks-cinder\") pod \"cinder-volume-volume2-0\" (UID: \"12c02e5f-fb4a-46e7-8772-07bbe148bdcd\") " pod="openstack/cinder-volume-volume2-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.858264 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/12c02e5f-fb4a-46e7-8772-07bbe148bdcd-run\") pod \"cinder-volume-volume2-0\" (UID: \"12c02e5f-fb4a-46e7-8772-07bbe148bdcd\") " pod="openstack/cinder-volume-volume2-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.858297 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p4l9j\" (UniqueName: \"kubernetes.io/projected/12c02e5f-fb4a-46e7-8772-07bbe148bdcd-kube-api-access-p4l9j\") pod \"cinder-volume-volume2-0\" (UID: \"12c02e5f-fb4a-46e7-8772-07bbe148bdcd\") " pod="openstack/cinder-volume-volume2-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.858328 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/12c02e5f-fb4a-46e7-8772-07bbe148bdcd-config-data\") pod \"cinder-volume-volume2-0\" (UID: \"12c02e5f-fb4a-46e7-8772-07bbe148bdcd\") " pod="openstack/cinder-volume-volume2-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.858358 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/12c02e5f-fb4a-46e7-8772-07bbe148bdcd-dev\") pod \"cinder-volume-volume2-0\" (UID: \"12c02e5f-fb4a-46e7-8772-07bbe148bdcd\") " pod="openstack/cinder-volume-volume2-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.858358 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/12c02e5f-fb4a-46e7-8772-07bbe148bdcd-var-locks-cinder\") pod \"cinder-volume-volume2-0\" (UID: \"12c02e5f-fb4a-46e7-8772-07bbe148bdcd\") " pod="openstack/cinder-volume-volume2-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.858431 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run\" (UniqueName: \"kubernetes.io/host-path/12c02e5f-fb4a-46e7-8772-07bbe148bdcd-run\") pod \"cinder-volume-volume2-0\" (UID: \"12c02e5f-fb4a-46e7-8772-07bbe148bdcd\") " pod="openstack/cinder-volume-volume2-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.858450 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/12c02e5f-fb4a-46e7-8772-07bbe148bdcd-var-locks-brick\") pod \"cinder-volume-volume2-0\" (UID: \"12c02e5f-fb4a-46e7-8772-07bbe148bdcd\") " pod="openstack/cinder-volume-volume2-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.858387 4730 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/12c02e5f-fb4a-46e7-8772-07bbe148bdcd-var-locks-brick\") pod \"cinder-volume-volume2-0\" (UID: \"12c02e5f-fb4a-46e7-8772-07bbe148bdcd\") " pod="openstack/cinder-volume-volume2-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.858877 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/12c02e5f-fb4a-46e7-8772-07bbe148bdcd-config-data-custom\") pod \"cinder-volume-volume2-0\" (UID: \"12c02e5f-fb4a-46e7-8772-07bbe148bdcd\") " pod="openstack/cinder-volume-volume2-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.858930 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/12c02e5f-fb4a-46e7-8772-07bbe148bdcd-sys\") pod \"cinder-volume-volume2-0\" (UID: \"12c02e5f-fb4a-46e7-8772-07bbe148bdcd\") " pod="openstack/cinder-volume-volume2-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.858978 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/12c02e5f-fb4a-46e7-8772-07bbe148bdcd-etc-machine-id\") pod \"cinder-volume-volume2-0\" (UID: \"12c02e5f-fb4a-46e7-8772-07bbe148bdcd\") " pod="openstack/cinder-volume-volume2-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.859007 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/12c02e5f-fb4a-46e7-8772-07bbe148bdcd-var-lib-cinder\") pod \"cinder-volume-volume2-0\" (UID: \"12c02e5f-fb4a-46e7-8772-07bbe148bdcd\") " pod="openstack/cinder-volume-volume2-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.859105 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/12c02e5f-fb4a-46e7-8772-07bbe148bdcd-lib-modules\") pod \"cinder-volume-volume2-0\" (UID: \"12c02e5f-fb4a-46e7-8772-07bbe148bdcd\") " pod="openstack/cinder-volume-volume2-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.859243 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/12c02e5f-fb4a-46e7-8772-07bbe148bdcd-ceph\") pod \"cinder-volume-volume2-0\" (UID: \"12c02e5f-fb4a-46e7-8772-07bbe148bdcd\") " pod="openstack/cinder-volume-volume2-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.859322 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/12c02e5f-fb4a-46e7-8772-07bbe148bdcd-etc-iscsi\") pod \"cinder-volume-volume2-0\" (UID: \"12c02e5f-fb4a-46e7-8772-07bbe148bdcd\") " pod="openstack/cinder-volume-volume2-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.859382 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/12c02e5f-fb4a-46e7-8772-07bbe148bdcd-scripts\") pod \"cinder-volume-volume2-0\" (UID: \"12c02e5f-fb4a-46e7-8772-07bbe148bdcd\") " pod="openstack/cinder-volume-volume2-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.859411 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/12c02e5f-fb4a-46e7-8772-07bbe148bdcd-etc-nvme\") pod \"cinder-volume-volume2-0\" 
(UID: \"12c02e5f-fb4a-46e7-8772-07bbe148bdcd\") " pod="openstack/cinder-volume-volume2-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.859472 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/12c02e5f-fb4a-46e7-8772-07bbe148bdcd-combined-ca-bundle\") pod \"cinder-volume-volume2-0\" (UID: \"12c02e5f-fb4a-46e7-8772-07bbe148bdcd\") " pod="openstack/cinder-volume-volume2-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.861393 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/12c02e5f-fb4a-46e7-8772-07bbe148bdcd-dev\") pod \"cinder-volume-volume2-0\" (UID: \"12c02e5f-fb4a-46e7-8772-07bbe148bdcd\") " pod="openstack/cinder-volume-volume2-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.861466 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/12c02e5f-fb4a-46e7-8772-07bbe148bdcd-lib-modules\") pod \"cinder-volume-volume2-0\" (UID: \"12c02e5f-fb4a-46e7-8772-07bbe148bdcd\") " pod="openstack/cinder-volume-volume2-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.862789 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/12c02e5f-fb4a-46e7-8772-07bbe148bdcd-etc-machine-id\") pod \"cinder-volume-volume2-0\" (UID: \"12c02e5f-fb4a-46e7-8772-07bbe148bdcd\") " pod="openstack/cinder-volume-volume2-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.862865 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/12c02e5f-fb4a-46e7-8772-07bbe148bdcd-sys\") pod \"cinder-volume-volume2-0\" (UID: \"12c02e5f-fb4a-46e7-8772-07bbe148bdcd\") " pod="openstack/cinder-volume-volume2-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.862923 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/12c02e5f-fb4a-46e7-8772-07bbe148bdcd-var-lib-cinder\") pod \"cinder-volume-volume2-0\" (UID: \"12c02e5f-fb4a-46e7-8772-07bbe148bdcd\") " pod="openstack/cinder-volume-volume2-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.862961 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/12c02e5f-fb4a-46e7-8772-07bbe148bdcd-etc-iscsi\") pod \"cinder-volume-volume2-0\" (UID: \"12c02e5f-fb4a-46e7-8772-07bbe148bdcd\") " pod="openstack/cinder-volume-volume2-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.863319 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/12c02e5f-fb4a-46e7-8772-07bbe148bdcd-config-data\") pod \"cinder-volume-volume2-0\" (UID: \"12c02e5f-fb4a-46e7-8772-07bbe148bdcd\") " pod="openstack/cinder-volume-volume2-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.863325 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/12c02e5f-fb4a-46e7-8772-07bbe148bdcd-combined-ca-bundle\") pod \"cinder-volume-volume2-0\" (UID: \"12c02e5f-fb4a-46e7-8772-07bbe148bdcd\") " pod="openstack/cinder-volume-volume2-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.863430 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-nvme\" (UniqueName: 
\"kubernetes.io/host-path/12c02e5f-fb4a-46e7-8772-07bbe148bdcd-etc-nvme\") pod \"cinder-volume-volume2-0\" (UID: \"12c02e5f-fb4a-46e7-8772-07bbe148bdcd\") " pod="openstack/cinder-volume-volume2-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.864917 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/12c02e5f-fb4a-46e7-8772-07bbe148bdcd-config-data-custom\") pod \"cinder-volume-volume2-0\" (UID: \"12c02e5f-fb4a-46e7-8772-07bbe148bdcd\") " pod="openstack/cinder-volume-volume2-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.868082 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/12c02e5f-fb4a-46e7-8772-07bbe148bdcd-ceph\") pod \"cinder-volume-volume2-0\" (UID: \"12c02e5f-fb4a-46e7-8772-07bbe148bdcd\") " pod="openstack/cinder-volume-volume2-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.868174 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/12c02e5f-fb4a-46e7-8772-07bbe148bdcd-scripts\") pod \"cinder-volume-volume2-0\" (UID: \"12c02e5f-fb4a-46e7-8772-07bbe148bdcd\") " pod="openstack/cinder-volume-volume2-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.880334 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p4l9j\" (UniqueName: \"kubernetes.io/projected/12c02e5f-fb4a-46e7-8772-07bbe148bdcd-kube-api-access-p4l9j\") pod \"cinder-volume-volume2-0\" (UID: \"12c02e5f-fb4a-46e7-8772-07bbe148bdcd\") " pod="openstack/cinder-volume-volume2-0" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.957557 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-6f965598bf-7jtmk"] Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.959387 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-6f965598bf-7jtmk" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.964519 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.964831 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-scripts" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.964987 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon-horizon-dockercfg-gdwnn" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.965136 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-config-data" Sep 30 10:43:11 crc kubenswrapper[4730]: I0930 10:43:11.989944 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-6f965598bf-7jtmk"] Sep 30 10:43:12 crc kubenswrapper[4730]: I0930 10:43:12.048029 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 30 10:43:12 crc kubenswrapper[4730]: I0930 10:43:12.049591 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 30 10:43:12 crc kubenswrapper[4730]: I0930 10:43:12.061665 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Sep 30 10:43:12 crc kubenswrapper[4730]: I0930 10:43:12.062118 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-6c8vd" Sep 30 10:43:12 crc kubenswrapper[4730]: I0930 10:43:12.062346 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Sep 30 10:43:12 crc kubenswrapper[4730]: I0930 10:43:12.062555 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Sep 30 10:43:12 crc kubenswrapper[4730]: I0930 10:43:12.062965 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d563cf1f-fec6-4264-986f-61763e37c786-logs\") pod \"horizon-6f965598bf-7jtmk\" (UID: \"d563cf1f-fec6-4264-986f-61763e37c786\") " pod="openstack/horizon-6f965598bf-7jtmk" Sep 30 10:43:12 crc kubenswrapper[4730]: I0930 10:43:12.063110 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d563cf1f-fec6-4264-986f-61763e37c786-scripts\") pod \"horizon-6f965598bf-7jtmk\" (UID: \"d563cf1f-fec6-4264-986f-61763e37c786\") " pod="openstack/horizon-6f965598bf-7jtmk" Sep 30 10:43:12 crc kubenswrapper[4730]: I0930 10:43:12.063171 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hl8cx\" (UniqueName: \"kubernetes.io/projected/d563cf1f-fec6-4264-986f-61763e37c786-kube-api-access-hl8cx\") pod \"horizon-6f965598bf-7jtmk\" (UID: \"d563cf1f-fec6-4264-986f-61763e37c786\") " pod="openstack/horizon-6f965598bf-7jtmk" Sep 30 10:43:12 crc kubenswrapper[4730]: I0930 10:43:12.063208 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/d563cf1f-fec6-4264-986f-61763e37c786-horizon-secret-key\") pod \"horizon-6f965598bf-7jtmk\" (UID: \"d563cf1f-fec6-4264-986f-61763e37c786\") " pod="openstack/horizon-6f965598bf-7jtmk" Sep 30 10:43:12 crc kubenswrapper[4730]: I0930 10:43:12.063259 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d563cf1f-fec6-4264-986f-61763e37c786-config-data\") pod \"horizon-6f965598bf-7jtmk\" (UID: \"d563cf1f-fec6-4264-986f-61763e37c786\") " pod="openstack/horizon-6f965598bf-7jtmk" Sep 30 10:43:12 crc kubenswrapper[4730]: I0930 10:43:12.063509 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-volume-volume2-0" Sep 30 10:43:12 crc kubenswrapper[4730]: I0930 10:43:12.120705 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 30 10:43:12 crc kubenswrapper[4730]: I0930 10:43:12.144295 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-866b9f44b9-x6nw7"] Sep 30 10:43:12 crc kubenswrapper[4730]: I0930 10:43:12.145965 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-866b9f44b9-x6nw7" Sep 30 10:43:12 crc kubenswrapper[4730]: I0930 10:43:12.164748 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/d563cf1f-fec6-4264-986f-61763e37c786-horizon-secret-key\") pod \"horizon-6f965598bf-7jtmk\" (UID: \"d563cf1f-fec6-4264-986f-61763e37c786\") " pod="openstack/horizon-6f965598bf-7jtmk" Sep 30 10:43:12 crc kubenswrapper[4730]: I0930 10:43:12.164799 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/27e55446-db32-4ddf-a826-cf187d660f08-ceph\") pod \"glance-default-internal-api-0\" (UID: \"27e55446-db32-4ddf-a826-cf187d660f08\") " pod="openstack/glance-default-internal-api-0" Sep 30 10:43:12 crc kubenswrapper[4730]: I0930 10:43:12.164830 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jvj25\" (UniqueName: \"kubernetes.io/projected/27e55446-db32-4ddf-a826-cf187d660f08-kube-api-access-jvj25\") pod \"glance-default-internal-api-0\" (UID: \"27e55446-db32-4ddf-a826-cf187d660f08\") " pod="openstack/glance-default-internal-api-0" Sep 30 10:43:12 crc kubenswrapper[4730]: I0930 10:43:12.164857 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d563cf1f-fec6-4264-986f-61763e37c786-config-data\") pod \"horizon-6f965598bf-7jtmk\" (UID: \"d563cf1f-fec6-4264-986f-61763e37c786\") " pod="openstack/horizon-6f965598bf-7jtmk" Sep 30 10:43:12 crc kubenswrapper[4730]: I0930 10:43:12.164890 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"27e55446-db32-4ddf-a826-cf187d660f08\") " pod="openstack/glance-default-internal-api-0" Sep 30 10:43:12 crc kubenswrapper[4730]: I0930 10:43:12.164930 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/27e55446-db32-4ddf-a826-cf187d660f08-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"27e55446-db32-4ddf-a826-cf187d660f08\") " pod="openstack/glance-default-internal-api-0" Sep 30 10:43:12 crc kubenswrapper[4730]: I0930 10:43:12.164945 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/27e55446-db32-4ddf-a826-cf187d660f08-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"27e55446-db32-4ddf-a826-cf187d660f08\") " pod="openstack/glance-default-internal-api-0" Sep 30 10:43:12 crc kubenswrapper[4730]: I0930 10:43:12.164971 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/27e55446-db32-4ddf-a826-cf187d660f08-config-data\") pod \"glance-default-internal-api-0\" (UID: \"27e55446-db32-4ddf-a826-cf187d660f08\") " pod="openstack/glance-default-internal-api-0" Sep 30 10:43:12 crc kubenswrapper[4730]: I0930 10:43:12.164989 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d563cf1f-fec6-4264-986f-61763e37c786-logs\") pod \"horizon-6f965598bf-7jtmk\" (UID: 
\"d563cf1f-fec6-4264-986f-61763e37c786\") " pod="openstack/horizon-6f965598bf-7jtmk" Sep 30 10:43:12 crc kubenswrapper[4730]: I0930 10:43:12.165008 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/27e55446-db32-4ddf-a826-cf187d660f08-scripts\") pod \"glance-default-internal-api-0\" (UID: \"27e55446-db32-4ddf-a826-cf187d660f08\") " pod="openstack/glance-default-internal-api-0" Sep 30 10:43:12 crc kubenswrapper[4730]: I0930 10:43:12.165037 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/27e55446-db32-4ddf-a826-cf187d660f08-logs\") pod \"glance-default-internal-api-0\" (UID: \"27e55446-db32-4ddf-a826-cf187d660f08\") " pod="openstack/glance-default-internal-api-0" Sep 30 10:43:12 crc kubenswrapper[4730]: I0930 10:43:12.165134 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d563cf1f-fec6-4264-986f-61763e37c786-scripts\") pod \"horizon-6f965598bf-7jtmk\" (UID: \"d563cf1f-fec6-4264-986f-61763e37c786\") " pod="openstack/horizon-6f965598bf-7jtmk" Sep 30 10:43:12 crc kubenswrapper[4730]: I0930 10:43:12.165169 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/27e55446-db32-4ddf-a826-cf187d660f08-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"27e55446-db32-4ddf-a826-cf187d660f08\") " pod="openstack/glance-default-internal-api-0" Sep 30 10:43:12 crc kubenswrapper[4730]: I0930 10:43:12.165196 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hl8cx\" (UniqueName: \"kubernetes.io/projected/d563cf1f-fec6-4264-986f-61763e37c786-kube-api-access-hl8cx\") pod \"horizon-6f965598bf-7jtmk\" (UID: \"d563cf1f-fec6-4264-986f-61763e37c786\") " pod="openstack/horizon-6f965598bf-7jtmk" Sep 30 10:43:12 crc kubenswrapper[4730]: I0930 10:43:12.167216 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d563cf1f-fec6-4264-986f-61763e37c786-logs\") pod \"horizon-6f965598bf-7jtmk\" (UID: \"d563cf1f-fec6-4264-986f-61763e37c786\") " pod="openstack/horizon-6f965598bf-7jtmk" Sep 30 10:43:12 crc kubenswrapper[4730]: I0930 10:43:12.167986 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d563cf1f-fec6-4264-986f-61763e37c786-scripts\") pod \"horizon-6f965598bf-7jtmk\" (UID: \"d563cf1f-fec6-4264-986f-61763e37c786\") " pod="openstack/horizon-6f965598bf-7jtmk" Sep 30 10:43:12 crc kubenswrapper[4730]: I0930 10:43:12.168296 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d563cf1f-fec6-4264-986f-61763e37c786-config-data\") pod \"horizon-6f965598bf-7jtmk\" (UID: \"d563cf1f-fec6-4264-986f-61763e37c786\") " pod="openstack/horizon-6f965598bf-7jtmk" Sep 30 10:43:12 crc kubenswrapper[4730]: I0930 10:43:12.169693 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-866b9f44b9-x6nw7"] Sep 30 10:43:12 crc kubenswrapper[4730]: I0930 10:43:12.180343 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/d563cf1f-fec6-4264-986f-61763e37c786-horizon-secret-key\") pod 
\"horizon-6f965598bf-7jtmk\" (UID: \"d563cf1f-fec6-4264-986f-61763e37c786\") " pod="openstack/horizon-6f965598bf-7jtmk" Sep 30 10:43:12 crc kubenswrapper[4730]: I0930 10:43:12.191568 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Sep 30 10:43:12 crc kubenswrapper[4730]: I0930 10:43:12.193567 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hl8cx\" (UniqueName: \"kubernetes.io/projected/d563cf1f-fec6-4264-986f-61763e37c786-kube-api-access-hl8cx\") pod \"horizon-6f965598bf-7jtmk\" (UID: \"d563cf1f-fec6-4264-986f-61763e37c786\") " pod="openstack/horizon-6f965598bf-7jtmk" Sep 30 10:43:12 crc kubenswrapper[4730]: I0930 10:43:12.195461 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 30 10:43:12 crc kubenswrapper[4730]: I0930 10:43:12.201291 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Sep 30 10:43:12 crc kubenswrapper[4730]: I0930 10:43:12.201526 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Sep 30 10:43:12 crc kubenswrapper[4730]: I0930 10:43:12.208352 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 30 10:43:12 crc kubenswrapper[4730]: I0930 10:43:12.266929 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/27e55446-db32-4ddf-a826-cf187d660f08-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"27e55446-db32-4ddf-a826-cf187d660f08\") " pod="openstack/glance-default-internal-api-0" Sep 30 10:43:12 crc kubenswrapper[4730]: I0930 10:43:12.266988 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/27e55446-db32-4ddf-a826-cf187d660f08-ceph\") pod \"glance-default-internal-api-0\" (UID: \"27e55446-db32-4ddf-a826-cf187d660f08\") " pod="openstack/glance-default-internal-api-0" Sep 30 10:43:12 crc kubenswrapper[4730]: I0930 10:43:12.267014 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jvj25\" (UniqueName: \"kubernetes.io/projected/27e55446-db32-4ddf-a826-cf187d660f08-kube-api-access-jvj25\") pod \"glance-default-internal-api-0\" (UID: \"27e55446-db32-4ddf-a826-cf187d660f08\") " pod="openstack/glance-default-internal-api-0" Sep 30 10:43:12 crc kubenswrapper[4730]: I0930 10:43:12.267050 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"27e55446-db32-4ddf-a826-cf187d660f08\") " pod="openstack/glance-default-internal-api-0" Sep 30 10:43:12 crc kubenswrapper[4730]: I0930 10:43:12.267089 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/27e55446-db32-4ddf-a826-cf187d660f08-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"27e55446-db32-4ddf-a826-cf187d660f08\") " pod="openstack/glance-default-internal-api-0" Sep 30 10:43:12 crc kubenswrapper[4730]: I0930 10:43:12.267105 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/27e55446-db32-4ddf-a826-cf187d660f08-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"27e55446-db32-4ddf-a826-cf187d660f08\") " pod="openstack/glance-default-internal-api-0" Sep 30 10:43:12 crc kubenswrapper[4730]: I0930 10:43:12.267129 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/27e55446-db32-4ddf-a826-cf187d660f08-config-data\") pod \"glance-default-internal-api-0\" (UID: \"27e55446-db32-4ddf-a826-cf187d660f08\") " pod="openstack/glance-default-internal-api-0" Sep 30 10:43:12 crc kubenswrapper[4730]: I0930 10:43:12.267151 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/27e55446-db32-4ddf-a826-cf187d660f08-scripts\") pod \"glance-default-internal-api-0\" (UID: \"27e55446-db32-4ddf-a826-cf187d660f08\") " pod="openstack/glance-default-internal-api-0" Sep 30 10:43:12 crc kubenswrapper[4730]: I0930 10:43:12.267173 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/27e55446-db32-4ddf-a826-cf187d660f08-logs\") pod \"glance-default-internal-api-0\" (UID: \"27e55446-db32-4ddf-a826-cf187d660f08\") " pod="openstack/glance-default-internal-api-0" Sep 30 10:43:12 crc kubenswrapper[4730]: I0930 10:43:12.267466 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/27e55446-db32-4ddf-a826-cf187d660f08-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"27e55446-db32-4ddf-a826-cf187d660f08\") " pod="openstack/glance-default-internal-api-0" Sep 30 10:43:12 crc kubenswrapper[4730]: I0930 10:43:12.267530 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/27e55446-db32-4ddf-a826-cf187d660f08-logs\") pod \"glance-default-internal-api-0\" (UID: \"27e55446-db32-4ddf-a826-cf187d660f08\") " pod="openstack/glance-default-internal-api-0" Sep 30 10:43:12 crc kubenswrapper[4730]: I0930 10:43:12.269985 4730 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"27e55446-db32-4ddf-a826-cf187d660f08\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/glance-default-internal-api-0" Sep 30 10:43:12 crc kubenswrapper[4730]: I0930 10:43:12.274359 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/27e55446-db32-4ddf-a826-cf187d660f08-ceph\") pod \"glance-default-internal-api-0\" (UID: \"27e55446-db32-4ddf-a826-cf187d660f08\") " pod="openstack/glance-default-internal-api-0" Sep 30 10:43:12 crc kubenswrapper[4730]: I0930 10:43:12.275114 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/27e55446-db32-4ddf-a826-cf187d660f08-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"27e55446-db32-4ddf-a826-cf187d660f08\") " pod="openstack/glance-default-internal-api-0" Sep 30 10:43:12 crc kubenswrapper[4730]: I0930 10:43:12.284199 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/27e55446-db32-4ddf-a826-cf187d660f08-config-data\") pod \"glance-default-internal-api-0\" (UID: 
\"27e55446-db32-4ddf-a826-cf187d660f08\") " pod="openstack/glance-default-internal-api-0" Sep 30 10:43:12 crc kubenswrapper[4730]: I0930 10:43:12.284320 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/27e55446-db32-4ddf-a826-cf187d660f08-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"27e55446-db32-4ddf-a826-cf187d660f08\") " pod="openstack/glance-default-internal-api-0" Sep 30 10:43:12 crc kubenswrapper[4730]: I0930 10:43:12.284807 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/27e55446-db32-4ddf-a826-cf187d660f08-scripts\") pod \"glance-default-internal-api-0\" (UID: \"27e55446-db32-4ddf-a826-cf187d660f08\") " pod="openstack/glance-default-internal-api-0" Sep 30 10:43:12 crc kubenswrapper[4730]: I0930 10:43:12.301260 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jvj25\" (UniqueName: \"kubernetes.io/projected/27e55446-db32-4ddf-a826-cf187d660f08-kube-api-access-jvj25\") pod \"glance-default-internal-api-0\" (UID: \"27e55446-db32-4ddf-a826-cf187d660f08\") " pod="openstack/glance-default-internal-api-0" Sep 30 10:43:12 crc kubenswrapper[4730]: I0930 10:43:12.308401 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"27e55446-db32-4ddf-a826-cf187d660f08\") " pod="openstack/glance-default-internal-api-0" Sep 30 10:43:12 crc kubenswrapper[4730]: I0930 10:43:12.329386 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-6f965598bf-7jtmk" Sep 30 10:43:12 crc kubenswrapper[4730]: I0930 10:43:12.370707 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/13559e77-1b10-43eb-af92-407513986ad3-horizon-secret-key\") pod \"horizon-866b9f44b9-x6nw7\" (UID: \"13559e77-1b10-43eb-af92-407513986ad3\") " pod="openstack/horizon-866b9f44b9-x6nw7" Sep 30 10:43:12 crc kubenswrapper[4730]: I0930 10:43:12.370808 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d67wh\" (UniqueName: \"kubernetes.io/projected/13559e77-1b10-43eb-af92-407513986ad3-kube-api-access-d67wh\") pod \"horizon-866b9f44b9-x6nw7\" (UID: \"13559e77-1b10-43eb-af92-407513986ad3\") " pod="openstack/horizon-866b9f44b9-x6nw7" Sep 30 10:43:12 crc kubenswrapper[4730]: I0930 10:43:12.370862 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/21951d2d-728f-412f-9af6-9a5d6b4e40b3-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"21951d2d-728f-412f-9af6-9a5d6b4e40b3\") " pod="openstack/glance-default-external-api-0" Sep 30 10:43:12 crc kubenswrapper[4730]: I0930 10:43:12.370892 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/21951d2d-728f-412f-9af6-9a5d6b4e40b3-ceph\") pod \"glance-default-external-api-0\" (UID: \"21951d2d-728f-412f-9af6-9a5d6b4e40b3\") " pod="openstack/glance-default-external-api-0" Sep 30 10:43:12 crc kubenswrapper[4730]: I0930 10:43:12.370992 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"21951d2d-728f-412f-9af6-9a5d6b4e40b3\") " pod="openstack/glance-default-external-api-0" Sep 30 10:43:12 crc kubenswrapper[4730]: I0930 10:43:12.371034 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/21951d2d-728f-412f-9af6-9a5d6b4e40b3-config-data\") pod \"glance-default-external-api-0\" (UID: \"21951d2d-728f-412f-9af6-9a5d6b4e40b3\") " pod="openstack/glance-default-external-api-0" Sep 30 10:43:12 crc kubenswrapper[4730]: I0930 10:43:12.371095 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/21951d2d-728f-412f-9af6-9a5d6b4e40b3-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"21951d2d-728f-412f-9af6-9a5d6b4e40b3\") " pod="openstack/glance-default-external-api-0" Sep 30 10:43:12 crc kubenswrapper[4730]: I0930 10:43:12.371121 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/21951d2d-728f-412f-9af6-9a5d6b4e40b3-scripts\") pod \"glance-default-external-api-0\" (UID: \"21951d2d-728f-412f-9af6-9a5d6b4e40b3\") " pod="openstack/glance-default-external-api-0" Sep 30 10:43:12 crc kubenswrapper[4730]: I0930 10:43:12.371180 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/21951d2d-728f-412f-9af6-9a5d6b4e40b3-logs\") pod \"glance-default-external-api-0\" (UID: \"21951d2d-728f-412f-9af6-9a5d6b4e40b3\") " pod="openstack/glance-default-external-api-0" Sep 30 10:43:12 crc kubenswrapper[4730]: I0930 10:43:12.371295 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/13559e77-1b10-43eb-af92-407513986ad3-scripts\") pod \"horizon-866b9f44b9-x6nw7\" (UID: \"13559e77-1b10-43eb-af92-407513986ad3\") " pod="openstack/horizon-866b9f44b9-x6nw7" Sep 30 10:43:12 crc kubenswrapper[4730]: I0930 10:43:12.371322 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5m7vk\" (UniqueName: \"kubernetes.io/projected/21951d2d-728f-412f-9af6-9a5d6b4e40b3-kube-api-access-5m7vk\") pod \"glance-default-external-api-0\" (UID: \"21951d2d-728f-412f-9af6-9a5d6b4e40b3\") " pod="openstack/glance-default-external-api-0" Sep 30 10:43:12 crc kubenswrapper[4730]: I0930 10:43:12.371367 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/13559e77-1b10-43eb-af92-407513986ad3-logs\") pod \"horizon-866b9f44b9-x6nw7\" (UID: \"13559e77-1b10-43eb-af92-407513986ad3\") " pod="openstack/horizon-866b9f44b9-x6nw7" Sep 30 10:43:12 crc kubenswrapper[4730]: I0930 10:43:12.371392 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21951d2d-728f-412f-9af6-9a5d6b4e40b3-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"21951d2d-728f-412f-9af6-9a5d6b4e40b3\") " pod="openstack/glance-default-external-api-0" Sep 30 10:43:12 crc kubenswrapper[4730]: I0930 10:43:12.371431 4730 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/13559e77-1b10-43eb-af92-407513986ad3-config-data\") pod \"horizon-866b9f44b9-x6nw7\" (UID: \"13559e77-1b10-43eb-af92-407513986ad3\") " pod="openstack/horizon-866b9f44b9-x6nw7" Sep 30 10:43:12 crc kubenswrapper[4730]: I0930 10:43:12.405978 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 30 10:43:12 crc kubenswrapper[4730]: I0930 10:43:12.473385 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21951d2d-728f-412f-9af6-9a5d6b4e40b3-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"21951d2d-728f-412f-9af6-9a5d6b4e40b3\") " pod="openstack/glance-default-external-api-0" Sep 30 10:43:12 crc kubenswrapper[4730]: I0930 10:43:12.473436 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/13559e77-1b10-43eb-af92-407513986ad3-config-data\") pod \"horizon-866b9f44b9-x6nw7\" (UID: \"13559e77-1b10-43eb-af92-407513986ad3\") " pod="openstack/horizon-866b9f44b9-x6nw7" Sep 30 10:43:12 crc kubenswrapper[4730]: I0930 10:43:12.473528 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/13559e77-1b10-43eb-af92-407513986ad3-horizon-secret-key\") pod \"horizon-866b9f44b9-x6nw7\" (UID: \"13559e77-1b10-43eb-af92-407513986ad3\") " pod="openstack/horizon-866b9f44b9-x6nw7" Sep 30 10:43:12 crc kubenswrapper[4730]: I0930 10:43:12.473582 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d67wh\" (UniqueName: \"kubernetes.io/projected/13559e77-1b10-43eb-af92-407513986ad3-kube-api-access-d67wh\") pod \"horizon-866b9f44b9-x6nw7\" (UID: \"13559e77-1b10-43eb-af92-407513986ad3\") " pod="openstack/horizon-866b9f44b9-x6nw7" Sep 30 10:43:12 crc kubenswrapper[4730]: I0930 10:43:12.473601 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/21951d2d-728f-412f-9af6-9a5d6b4e40b3-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"21951d2d-728f-412f-9af6-9a5d6b4e40b3\") " pod="openstack/glance-default-external-api-0" Sep 30 10:43:12 crc kubenswrapper[4730]: I0930 10:43:12.473702 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/21951d2d-728f-412f-9af6-9a5d6b4e40b3-ceph\") pod \"glance-default-external-api-0\" (UID: \"21951d2d-728f-412f-9af6-9a5d6b4e40b3\") " pod="openstack/glance-default-external-api-0" Sep 30 10:43:12 crc kubenswrapper[4730]: I0930 10:43:12.473726 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"21951d2d-728f-412f-9af6-9a5d6b4e40b3\") " pod="openstack/glance-default-external-api-0" Sep 30 10:43:12 crc kubenswrapper[4730]: I0930 10:43:12.473772 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/21951d2d-728f-412f-9af6-9a5d6b4e40b3-config-data\") pod \"glance-default-external-api-0\" (UID: \"21951d2d-728f-412f-9af6-9a5d6b4e40b3\") " pod="openstack/glance-default-external-api-0" Sep 30 10:43:12 
crc kubenswrapper[4730]: I0930 10:43:12.473833 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/21951d2d-728f-412f-9af6-9a5d6b4e40b3-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"21951d2d-728f-412f-9af6-9a5d6b4e40b3\") " pod="openstack/glance-default-external-api-0" Sep 30 10:43:12 crc kubenswrapper[4730]: I0930 10:43:12.473866 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/21951d2d-728f-412f-9af6-9a5d6b4e40b3-scripts\") pod \"glance-default-external-api-0\" (UID: \"21951d2d-728f-412f-9af6-9a5d6b4e40b3\") " pod="openstack/glance-default-external-api-0" Sep 30 10:43:12 crc kubenswrapper[4730]: I0930 10:43:12.473881 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/21951d2d-728f-412f-9af6-9a5d6b4e40b3-logs\") pod \"glance-default-external-api-0\" (UID: \"21951d2d-728f-412f-9af6-9a5d6b4e40b3\") " pod="openstack/glance-default-external-api-0" Sep 30 10:43:12 crc kubenswrapper[4730]: I0930 10:43:12.473953 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/13559e77-1b10-43eb-af92-407513986ad3-scripts\") pod \"horizon-866b9f44b9-x6nw7\" (UID: \"13559e77-1b10-43eb-af92-407513986ad3\") " pod="openstack/horizon-866b9f44b9-x6nw7" Sep 30 10:43:12 crc kubenswrapper[4730]: I0930 10:43:12.473974 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5m7vk\" (UniqueName: \"kubernetes.io/projected/21951d2d-728f-412f-9af6-9a5d6b4e40b3-kube-api-access-5m7vk\") pod \"glance-default-external-api-0\" (UID: \"21951d2d-728f-412f-9af6-9a5d6b4e40b3\") " pod="openstack/glance-default-external-api-0" Sep 30 10:43:12 crc kubenswrapper[4730]: I0930 10:43:12.474020 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/13559e77-1b10-43eb-af92-407513986ad3-logs\") pod \"horizon-866b9f44b9-x6nw7\" (UID: \"13559e77-1b10-43eb-af92-407513986ad3\") " pod="openstack/horizon-866b9f44b9-x6nw7" Sep 30 10:43:12 crc kubenswrapper[4730]: I0930 10:43:12.474569 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/13559e77-1b10-43eb-af92-407513986ad3-logs\") pod \"horizon-866b9f44b9-x6nw7\" (UID: \"13559e77-1b10-43eb-af92-407513986ad3\") " pod="openstack/horizon-866b9f44b9-x6nw7" Sep 30 10:43:12 crc kubenswrapper[4730]: I0930 10:43:12.481873 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/21951d2d-728f-412f-9af6-9a5d6b4e40b3-config-data\") pod \"glance-default-external-api-0\" (UID: \"21951d2d-728f-412f-9af6-9a5d6b4e40b3\") " pod="openstack/glance-default-external-api-0" Sep 30 10:43:12 crc kubenswrapper[4730]: I0930 10:43:12.483020 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/21951d2d-728f-412f-9af6-9a5d6b4e40b3-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"21951d2d-728f-412f-9af6-9a5d6b4e40b3\") " pod="openstack/glance-default-external-api-0" Sep 30 10:43:12 crc kubenswrapper[4730]: I0930 10:43:12.483077 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/configmap/13559e77-1b10-43eb-af92-407513986ad3-scripts\") pod \"horizon-866b9f44b9-x6nw7\" (UID: \"13559e77-1b10-43eb-af92-407513986ad3\") " pod="openstack/horizon-866b9f44b9-x6nw7" Sep 30 10:43:12 crc kubenswrapper[4730]: I0930 10:43:12.483397 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21951d2d-728f-412f-9af6-9a5d6b4e40b3-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"21951d2d-728f-412f-9af6-9a5d6b4e40b3\") " pod="openstack/glance-default-external-api-0" Sep 30 10:43:12 crc kubenswrapper[4730]: I0930 10:43:12.483602 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/21951d2d-728f-412f-9af6-9a5d6b4e40b3-logs\") pod \"glance-default-external-api-0\" (UID: \"21951d2d-728f-412f-9af6-9a5d6b4e40b3\") " pod="openstack/glance-default-external-api-0" Sep 30 10:43:12 crc kubenswrapper[4730]: I0930 10:43:12.496259 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/21951d2d-728f-412f-9af6-9a5d6b4e40b3-ceph\") pod \"glance-default-external-api-0\" (UID: \"21951d2d-728f-412f-9af6-9a5d6b4e40b3\") " pod="openstack/glance-default-external-api-0" Sep 30 10:43:12 crc kubenswrapper[4730]: I0930 10:43:12.501117 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/21951d2d-728f-412f-9af6-9a5d6b4e40b3-scripts\") pod \"glance-default-external-api-0\" (UID: \"21951d2d-728f-412f-9af6-9a5d6b4e40b3\") " pod="openstack/glance-default-external-api-0" Sep 30 10:43:12 crc kubenswrapper[4730]: I0930 10:43:12.501598 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/13559e77-1b10-43eb-af92-407513986ad3-config-data\") pod \"horizon-866b9f44b9-x6nw7\" (UID: \"13559e77-1b10-43eb-af92-407513986ad3\") " pod="openstack/horizon-866b9f44b9-x6nw7" Sep 30 10:43:12 crc kubenswrapper[4730]: I0930 10:43:12.502078 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/21951d2d-728f-412f-9af6-9a5d6b4e40b3-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"21951d2d-728f-412f-9af6-9a5d6b4e40b3\") " pod="openstack/glance-default-external-api-0" Sep 30 10:43:12 crc kubenswrapper[4730]: I0930 10:43:12.502421 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/13559e77-1b10-43eb-af92-407513986ad3-horizon-secret-key\") pod \"horizon-866b9f44b9-x6nw7\" (UID: \"13559e77-1b10-43eb-af92-407513986ad3\") " pod="openstack/horizon-866b9f44b9-x6nw7" Sep 30 10:43:12 crc kubenswrapper[4730]: I0930 10:43:12.483594 4730 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"21951d2d-728f-412f-9af6-9a5d6b4e40b3\") device mount path \"/mnt/openstack/pv09\"" pod="openstack/glance-default-external-api-0" Sep 30 10:43:12 crc kubenswrapper[4730]: I0930 10:43:12.507741 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d67wh\" (UniqueName: \"kubernetes.io/projected/13559e77-1b10-43eb-af92-407513986ad3-kube-api-access-d67wh\") pod \"horizon-866b9f44b9-x6nw7\" (UID: 
\"13559e77-1b10-43eb-af92-407513986ad3\") " pod="openstack/horizon-866b9f44b9-x6nw7" Sep 30 10:43:12 crc kubenswrapper[4730]: I0930 10:43:12.528796 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5m7vk\" (UniqueName: \"kubernetes.io/projected/21951d2d-728f-412f-9af6-9a5d6b4e40b3-kube-api-access-5m7vk\") pod \"glance-default-external-api-0\" (UID: \"21951d2d-728f-412f-9af6-9a5d6b4e40b3\") " pod="openstack/glance-default-external-api-0" Sep 30 10:43:12 crc kubenswrapper[4730]: I0930 10:43:12.546129 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-backup-0"] Sep 30 10:43:12 crc kubenswrapper[4730]: I0930 10:43:12.567147 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"21951d2d-728f-412f-9af6-9a5d6b4e40b3\") " pod="openstack/glance-default-external-api-0" Sep 30 10:43:12 crc kubenswrapper[4730]: I0930 10:43:12.701683 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-volume-volume1-0"] Sep 30 10:43:12 crc kubenswrapper[4730]: W0930 10:43:12.786422 4730 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podad078102_347b_4f85_8fa5_f83cbf35c06a.slice/crio-ade64981b504fc7ae6384e2e31aab2fe6a90a18d403e0b9584cdca7e48727ca6 WatchSource:0}: Error finding container ade64981b504fc7ae6384e2e31aab2fe6a90a18d403e0b9584cdca7e48727ca6: Status 404 returned error can't find the container with id ade64981b504fc7ae6384e2e31aab2fe6a90a18d403e0b9584cdca7e48727ca6 Sep 30 10:43:12 crc kubenswrapper[4730]: I0930 10:43:12.786790 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-866b9f44b9-x6nw7" Sep 30 10:43:12 crc kubenswrapper[4730]: I0930 10:43:12.814969 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-volume-volume2-0"] Sep 30 10:43:12 crc kubenswrapper[4730]: I0930 10:43:12.853914 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 30 10:43:12 crc kubenswrapper[4730]: W0930 10:43:12.857040 4730 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod12c02e5f_fb4a_46e7_8772_07bbe148bdcd.slice/crio-b8b0291ddee558651164a8b893c5af41a5944eed08bd258713bc11ede9c60e6f WatchSource:0}: Error finding container b8b0291ddee558651164a8b893c5af41a5944eed08bd258713bc11ede9c60e6f: Status 404 returned error can't find the container with id b8b0291ddee558651164a8b893c5af41a5944eed08bd258713bc11ede9c60e6f Sep 30 10:43:13 crc kubenswrapper[4730]: I0930 10:43:13.010179 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-6f965598bf-7jtmk"] Sep 30 10:43:13 crc kubenswrapper[4730]: I0930 10:43:13.151876 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 30 10:43:13 crc kubenswrapper[4730]: W0930 10:43:13.162315 4730 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod27e55446_db32_4ddf_a826_cf187d660f08.slice/crio-4300f6f1b56846e244196babcf5559fed621c575738d8a27a1314cb24efd1f45 WatchSource:0}: Error finding container 4300f6f1b56846e244196babcf5559fed621c575738d8a27a1314cb24efd1f45: Status 404 returned error can't find the container with id 4300f6f1b56846e244196babcf5559fed621c575738d8a27a1314cb24efd1f45 Sep 30 10:43:13 crc kubenswrapper[4730]: I0930 10:43:13.451580 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-volume1-0" event={"ID":"ad078102-347b-4f85-8fa5-f83cbf35c06a","Type":"ContainerStarted","Data":"ade64981b504fc7ae6384e2e31aab2fe6a90a18d403e0b9584cdca7e48727ca6"} Sep 30 10:43:13 crc kubenswrapper[4730]: I0930 10:43:13.453757 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6f965598bf-7jtmk" event={"ID":"d563cf1f-fec6-4264-986f-61763e37c786","Type":"ContainerStarted","Data":"1e301ba986dbe1522591209b08d67aac46076405063cac4f2f6ac63ef36d269f"} Sep 30 10:43:13 crc kubenswrapper[4730]: I0930 10:43:13.458036 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-volume2-0" event={"ID":"12c02e5f-fb4a-46e7-8772-07bbe148bdcd","Type":"ContainerStarted","Data":"b8b0291ddee558651164a8b893c5af41a5944eed08bd258713bc11ede9c60e6f"} Sep 30 10:43:13 crc kubenswrapper[4730]: I0930 10:43:13.461960 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"27e55446-db32-4ddf-a826-cf187d660f08","Type":"ContainerStarted","Data":"4300f6f1b56846e244196babcf5559fed621c575738d8a27a1314cb24efd1f45"} Sep 30 10:43:13 crc kubenswrapper[4730]: I0930 10:43:13.474121 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-backup-0" event={"ID":"0983f2ba-1e23-492a-abb5-5cc4f4199925","Type":"ContainerStarted","Data":"b1fb83a1cbc5f2db433fbe7589b672fadfb1e3be52a23cd050c9aaa5ca344b0c"} Sep 30 10:43:13 crc kubenswrapper[4730]: I0930 10:43:13.534951 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 30 10:43:13 crc kubenswrapper[4730]: I0930 10:43:13.555852 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-866b9f44b9-x6nw7"] Sep 30 10:43:13 crc kubenswrapper[4730]: W0930 10:43:13.616351 4730 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod21951d2d_728f_412f_9af6_9a5d6b4e40b3.slice/crio-f17cf83323599ef7d2fd338bbeb107fa97ba334d6ef2ebdf5e5d9f883a4b2c8c WatchSource:0}: Error finding container f17cf83323599ef7d2fd338bbeb107fa97ba334d6ef2ebdf5e5d9f883a4b2c8c: Status 404 returned error can't find the container with id f17cf83323599ef7d2fd338bbeb107fa97ba334d6ef2ebdf5e5d9f883a4b2c8c Sep 30 10:43:13 crc kubenswrapper[4730]: W0930 10:43:13.652315 4730 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod13559e77_1b10_43eb_af92_407513986ad3.slice/crio-8bccc496d6a29afe6d4ed69a2b68b3832d4d0314ba4a43437c368398b90e2165 WatchSource:0}: Error finding container 8bccc496d6a29afe6d4ed69a2b68b3832d4d0314ba4a43437c368398b90e2165: Status 404 returned error can't find the container with id 8bccc496d6a29afe6d4ed69a2b68b3832d4d0314ba4a43437c368398b90e2165 Sep 30 10:43:14 crc kubenswrapper[4730]: I0930 10:43:14.496810 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"21951d2d-728f-412f-9af6-9a5d6b4e40b3","Type":"ContainerStarted","Data":"f17cf83323599ef7d2fd338bbeb107fa97ba334d6ef2ebdf5e5d9f883a4b2c8c"} Sep 30 10:43:14 crc kubenswrapper[4730]: I0930 10:43:14.502002 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-volume2-0" event={"ID":"12c02e5f-fb4a-46e7-8772-07bbe148bdcd","Type":"ContainerStarted","Data":"182022f89840bca3b10f77980dacf68d90c993dac29c804afa1e80c2f9fe55e6"} Sep 30 10:43:14 crc kubenswrapper[4730]: I0930 10:43:14.508874 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-backup-0" event={"ID":"0983f2ba-1e23-492a-abb5-5cc4f4199925","Type":"ContainerStarted","Data":"37336fa5621580e4ee69a05c284b164e9ea3f7ac9aa0d4482e3ae5da63d0e024"} Sep 30 10:43:14 crc kubenswrapper[4730]: I0930 10:43:14.508991 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-backup-0" event={"ID":"0983f2ba-1e23-492a-abb5-5cc4f4199925","Type":"ContainerStarted","Data":"a5e40d0df696e99cf12d0c675f4ae0c28aeb2dd666a1f0998c77e03edfd79771"} Sep 30 10:43:14 crc kubenswrapper[4730]: I0930 10:43:14.519460 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-volume1-0" event={"ID":"ad078102-347b-4f85-8fa5-f83cbf35c06a","Type":"ContainerStarted","Data":"02795044af5de5e3a8386445df486e854225ff28d13f178b7c76c6b6204a66a2"} Sep 30 10:43:14 crc kubenswrapper[4730]: I0930 10:43:14.521199 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-866b9f44b9-x6nw7" event={"ID":"13559e77-1b10-43eb-af92-407513986ad3","Type":"ContainerStarted","Data":"8bccc496d6a29afe6d4ed69a2b68b3832d4d0314ba4a43437c368398b90e2165"} Sep 30 10:43:14 crc kubenswrapper[4730]: I0930 10:43:14.537820 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-backup-0" podStartSLOduration=3.178815896 podStartE2EDuration="3.537802031s" podCreationTimestamp="2025-09-30 10:43:11 +0000 UTC" firstStartedPulling="2025-09-30 10:43:12.524550137 +0000 UTC m=+3236.857810130" lastFinishedPulling="2025-09-30 10:43:12.883536272 +0000 UTC m=+3237.216796265" observedRunningTime="2025-09-30 10:43:14.535523912 +0000 UTC m=+3238.868783905" watchObservedRunningTime="2025-09-30 10:43:14.537802031 +0000 UTC m=+3238.871062024" Sep 30 10:43:14 crc kubenswrapper[4730]: I0930 10:43:14.775607 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/horizon-6f965598bf-7jtmk"] Sep 30 10:43:14 crc kubenswrapper[4730]: I0930 10:43:14.832438 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-78fdfbc44d-gv74b"] Sep 30 10:43:14 crc kubenswrapper[4730]: I0930 10:43:14.858843 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-78fdfbc44d-gv74b" Sep 30 10:43:14 crc kubenswrapper[4730]: I0930 10:43:14.867355 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-horizon-svc" Sep 30 10:43:14 crc kubenswrapper[4730]: I0930 10:43:14.879063 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 30 10:43:14 crc kubenswrapper[4730]: I0930 10:43:14.897296 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-78fdfbc44d-gv74b"] Sep 30 10:43:14 crc kubenswrapper[4730]: I0930 10:43:14.908508 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-866b9f44b9-x6nw7"] Sep 30 10:43:14 crc kubenswrapper[4730]: I0930 10:43:14.965377 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7p4pq\" (UniqueName: \"kubernetes.io/projected/4e4cdc0d-7159-473c-b832-8628f59a1ebb-kube-api-access-7p4pq\") pod \"horizon-78fdfbc44d-gv74b\" (UID: \"4e4cdc0d-7159-473c-b832-8628f59a1ebb\") " pod="openstack/horizon-78fdfbc44d-gv74b" Sep 30 10:43:14 crc kubenswrapper[4730]: I0930 10:43:14.965458 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4e4cdc0d-7159-473c-b832-8628f59a1ebb-config-data\") pod \"horizon-78fdfbc44d-gv74b\" (UID: \"4e4cdc0d-7159-473c-b832-8628f59a1ebb\") " pod="openstack/horizon-78fdfbc44d-gv74b" Sep 30 10:43:14 crc kubenswrapper[4730]: I0930 10:43:14.965572 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4e4cdc0d-7159-473c-b832-8628f59a1ebb-logs\") pod \"horizon-78fdfbc44d-gv74b\" (UID: \"4e4cdc0d-7159-473c-b832-8628f59a1ebb\") " pod="openstack/horizon-78fdfbc44d-gv74b" Sep 30 10:43:14 crc kubenswrapper[4730]: I0930 10:43:14.965744 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e4cdc0d-7159-473c-b832-8628f59a1ebb-horizon-tls-certs\") pod \"horizon-78fdfbc44d-gv74b\" (UID: \"4e4cdc0d-7159-473c-b832-8628f59a1ebb\") " pod="openstack/horizon-78fdfbc44d-gv74b" Sep 30 10:43:14 crc kubenswrapper[4730]: I0930 10:43:14.965834 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e4cdc0d-7159-473c-b832-8628f59a1ebb-combined-ca-bundle\") pod \"horizon-78fdfbc44d-gv74b\" (UID: \"4e4cdc0d-7159-473c-b832-8628f59a1ebb\") " pod="openstack/horizon-78fdfbc44d-gv74b" Sep 30 10:43:14 crc kubenswrapper[4730]: I0930 10:43:14.966002 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4e4cdc0d-7159-473c-b832-8628f59a1ebb-scripts\") pod \"horizon-78fdfbc44d-gv74b\" (UID: \"4e4cdc0d-7159-473c-b832-8628f59a1ebb\") " pod="openstack/horizon-78fdfbc44d-gv74b" Sep 30 10:43:14 crc kubenswrapper[4730]: I0930 10:43:14.966322 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/4e4cdc0d-7159-473c-b832-8628f59a1ebb-horizon-secret-key\") pod \"horizon-78fdfbc44d-gv74b\" (UID: \"4e4cdc0d-7159-473c-b832-8628f59a1ebb\") " pod="openstack/horizon-78fdfbc44d-gv74b" Sep 30 10:43:15 crc kubenswrapper[4730]: I0930 10:43:15.038118 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 30 10:43:15 crc kubenswrapper[4730]: I0930 10:43:15.077805 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7p4pq\" (UniqueName: \"kubernetes.io/projected/4e4cdc0d-7159-473c-b832-8628f59a1ebb-kube-api-access-7p4pq\") pod \"horizon-78fdfbc44d-gv74b\" (UID: \"4e4cdc0d-7159-473c-b832-8628f59a1ebb\") " pod="openstack/horizon-78fdfbc44d-gv74b" Sep 30 10:43:15 crc kubenswrapper[4730]: I0930 10:43:15.077852 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4e4cdc0d-7159-473c-b832-8628f59a1ebb-config-data\") pod \"horizon-78fdfbc44d-gv74b\" (UID: \"4e4cdc0d-7159-473c-b832-8628f59a1ebb\") " pod="openstack/horizon-78fdfbc44d-gv74b" Sep 30 10:43:15 crc kubenswrapper[4730]: I0930 10:43:15.077888 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4e4cdc0d-7159-473c-b832-8628f59a1ebb-logs\") pod \"horizon-78fdfbc44d-gv74b\" (UID: \"4e4cdc0d-7159-473c-b832-8628f59a1ebb\") " pod="openstack/horizon-78fdfbc44d-gv74b" Sep 30 10:43:15 crc kubenswrapper[4730]: I0930 10:43:15.077934 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e4cdc0d-7159-473c-b832-8628f59a1ebb-horizon-tls-certs\") pod \"horizon-78fdfbc44d-gv74b\" (UID: \"4e4cdc0d-7159-473c-b832-8628f59a1ebb\") " pod="openstack/horizon-78fdfbc44d-gv74b" Sep 30 10:43:15 crc kubenswrapper[4730]: I0930 10:43:15.077958 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e4cdc0d-7159-473c-b832-8628f59a1ebb-combined-ca-bundle\") pod \"horizon-78fdfbc44d-gv74b\" (UID: \"4e4cdc0d-7159-473c-b832-8628f59a1ebb\") " pod="openstack/horizon-78fdfbc44d-gv74b" Sep 30 10:43:15 crc kubenswrapper[4730]: I0930 10:43:15.077994 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4e4cdc0d-7159-473c-b832-8628f59a1ebb-scripts\") pod \"horizon-78fdfbc44d-gv74b\" (UID: \"4e4cdc0d-7159-473c-b832-8628f59a1ebb\") " pod="openstack/horizon-78fdfbc44d-gv74b" Sep 30 10:43:15 crc kubenswrapper[4730]: I0930 10:43:15.078057 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/4e4cdc0d-7159-473c-b832-8628f59a1ebb-horizon-secret-key\") pod \"horizon-78fdfbc44d-gv74b\" (UID: \"4e4cdc0d-7159-473c-b832-8628f59a1ebb\") " pod="openstack/horizon-78fdfbc44d-gv74b" Sep 30 10:43:15 crc kubenswrapper[4730]: I0930 10:43:15.080596 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4e4cdc0d-7159-473c-b832-8628f59a1ebb-config-data\") pod \"horizon-78fdfbc44d-gv74b\" (UID: \"4e4cdc0d-7159-473c-b832-8628f59a1ebb\") " pod="openstack/horizon-78fdfbc44d-gv74b" Sep 30 10:43:15 crc kubenswrapper[4730]: I0930 10:43:15.083557 4730 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4e4cdc0d-7159-473c-b832-8628f59a1ebb-scripts\") pod \"horizon-78fdfbc44d-gv74b\" (UID: \"4e4cdc0d-7159-473c-b832-8628f59a1ebb\") " pod="openstack/horizon-78fdfbc44d-gv74b" Sep 30 10:43:15 crc kubenswrapper[4730]: I0930 10:43:15.084024 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4e4cdc0d-7159-473c-b832-8628f59a1ebb-logs\") pod \"horizon-78fdfbc44d-gv74b\" (UID: \"4e4cdc0d-7159-473c-b832-8628f59a1ebb\") " pod="openstack/horizon-78fdfbc44d-gv74b" Sep 30 10:43:15 crc kubenswrapper[4730]: I0930 10:43:15.087824 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e4cdc0d-7159-473c-b832-8628f59a1ebb-horizon-tls-certs\") pod \"horizon-78fdfbc44d-gv74b\" (UID: \"4e4cdc0d-7159-473c-b832-8628f59a1ebb\") " pod="openstack/horizon-78fdfbc44d-gv74b" Sep 30 10:43:15 crc kubenswrapper[4730]: I0930 10:43:15.089501 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e4cdc0d-7159-473c-b832-8628f59a1ebb-combined-ca-bundle\") pod \"horizon-78fdfbc44d-gv74b\" (UID: \"4e4cdc0d-7159-473c-b832-8628f59a1ebb\") " pod="openstack/horizon-78fdfbc44d-gv74b" Sep 30 10:43:15 crc kubenswrapper[4730]: I0930 10:43:15.096987 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/4e4cdc0d-7159-473c-b832-8628f59a1ebb-horizon-secret-key\") pod \"horizon-78fdfbc44d-gv74b\" (UID: \"4e4cdc0d-7159-473c-b832-8628f59a1ebb\") " pod="openstack/horizon-78fdfbc44d-gv74b" Sep 30 10:43:15 crc kubenswrapper[4730]: I0930 10:43:15.097051 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-6b9f68988b-b4q58"] Sep 30 10:43:15 crc kubenswrapper[4730]: I0930 10:43:15.098600 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-6b9f68988b-b4q58" Sep 30 10:43:15 crc kubenswrapper[4730]: I0930 10:43:15.107590 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7p4pq\" (UniqueName: \"kubernetes.io/projected/4e4cdc0d-7159-473c-b832-8628f59a1ebb-kube-api-access-7p4pq\") pod \"horizon-78fdfbc44d-gv74b\" (UID: \"4e4cdc0d-7159-473c-b832-8628f59a1ebb\") " pod="openstack/horizon-78fdfbc44d-gv74b" Sep 30 10:43:15 crc kubenswrapper[4730]: I0930 10:43:15.119766 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-6b9f68988b-b4q58"] Sep 30 10:43:15 crc kubenswrapper[4730]: I0930 10:43:15.188901 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/73859337-4ff6-4ada-bc9b-a29b6b1fc478-scripts\") pod \"horizon-6b9f68988b-b4q58\" (UID: \"73859337-4ff6-4ada-bc9b-a29b6b1fc478\") " pod="openstack/horizon-6b9f68988b-b4q58" Sep 30 10:43:15 crc kubenswrapper[4730]: I0930 10:43:15.188989 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/73859337-4ff6-4ada-bc9b-a29b6b1fc478-horizon-secret-key\") pod \"horizon-6b9f68988b-b4q58\" (UID: \"73859337-4ff6-4ada-bc9b-a29b6b1fc478\") " pod="openstack/horizon-6b9f68988b-b4q58" Sep 30 10:43:15 crc kubenswrapper[4730]: I0930 10:43:15.189021 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qthnm\" (UniqueName: \"kubernetes.io/projected/73859337-4ff6-4ada-bc9b-a29b6b1fc478-kube-api-access-qthnm\") pod \"horizon-6b9f68988b-b4q58\" (UID: \"73859337-4ff6-4ada-bc9b-a29b6b1fc478\") " pod="openstack/horizon-6b9f68988b-b4q58" Sep 30 10:43:15 crc kubenswrapper[4730]: I0930 10:43:15.189055 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/73859337-4ff6-4ada-bc9b-a29b6b1fc478-logs\") pod \"horizon-6b9f68988b-b4q58\" (UID: \"73859337-4ff6-4ada-bc9b-a29b6b1fc478\") " pod="openstack/horizon-6b9f68988b-b4q58" Sep 30 10:43:15 crc kubenswrapper[4730]: I0930 10:43:15.189073 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/73859337-4ff6-4ada-bc9b-a29b6b1fc478-config-data\") pod \"horizon-6b9f68988b-b4q58\" (UID: \"73859337-4ff6-4ada-bc9b-a29b6b1fc478\") " pod="openstack/horizon-6b9f68988b-b4q58" Sep 30 10:43:15 crc kubenswrapper[4730]: I0930 10:43:15.189144 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/73859337-4ff6-4ada-bc9b-a29b6b1fc478-horizon-tls-certs\") pod \"horizon-6b9f68988b-b4q58\" (UID: \"73859337-4ff6-4ada-bc9b-a29b6b1fc478\") " pod="openstack/horizon-6b9f68988b-b4q58" Sep 30 10:43:15 crc kubenswrapper[4730]: I0930 10:43:15.189168 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/73859337-4ff6-4ada-bc9b-a29b6b1fc478-combined-ca-bundle\") pod \"horizon-6b9f68988b-b4q58\" (UID: \"73859337-4ff6-4ada-bc9b-a29b6b1fc478\") " pod="openstack/horizon-6b9f68988b-b4q58" Sep 30 10:43:15 crc kubenswrapper[4730]: I0930 10:43:15.290879 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"scripts\" (UniqueName: \"kubernetes.io/configmap/73859337-4ff6-4ada-bc9b-a29b6b1fc478-scripts\") pod \"horizon-6b9f68988b-b4q58\" (UID: \"73859337-4ff6-4ada-bc9b-a29b6b1fc478\") " pod="openstack/horizon-6b9f68988b-b4q58" Sep 30 10:43:15 crc kubenswrapper[4730]: I0930 10:43:15.291203 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/73859337-4ff6-4ada-bc9b-a29b6b1fc478-horizon-secret-key\") pod \"horizon-6b9f68988b-b4q58\" (UID: \"73859337-4ff6-4ada-bc9b-a29b6b1fc478\") " pod="openstack/horizon-6b9f68988b-b4q58" Sep 30 10:43:15 crc kubenswrapper[4730]: I0930 10:43:15.291230 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qthnm\" (UniqueName: \"kubernetes.io/projected/73859337-4ff6-4ada-bc9b-a29b6b1fc478-kube-api-access-qthnm\") pod \"horizon-6b9f68988b-b4q58\" (UID: \"73859337-4ff6-4ada-bc9b-a29b6b1fc478\") " pod="openstack/horizon-6b9f68988b-b4q58" Sep 30 10:43:15 crc kubenswrapper[4730]: I0930 10:43:15.291267 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/73859337-4ff6-4ada-bc9b-a29b6b1fc478-logs\") pod \"horizon-6b9f68988b-b4q58\" (UID: \"73859337-4ff6-4ada-bc9b-a29b6b1fc478\") " pod="openstack/horizon-6b9f68988b-b4q58" Sep 30 10:43:15 crc kubenswrapper[4730]: I0930 10:43:15.291290 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/73859337-4ff6-4ada-bc9b-a29b6b1fc478-config-data\") pod \"horizon-6b9f68988b-b4q58\" (UID: \"73859337-4ff6-4ada-bc9b-a29b6b1fc478\") " pod="openstack/horizon-6b9f68988b-b4q58" Sep 30 10:43:15 crc kubenswrapper[4730]: I0930 10:43:15.291378 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/73859337-4ff6-4ada-bc9b-a29b6b1fc478-horizon-tls-certs\") pod \"horizon-6b9f68988b-b4q58\" (UID: \"73859337-4ff6-4ada-bc9b-a29b6b1fc478\") " pod="openstack/horizon-6b9f68988b-b4q58" Sep 30 10:43:15 crc kubenswrapper[4730]: I0930 10:43:15.291407 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/73859337-4ff6-4ada-bc9b-a29b6b1fc478-combined-ca-bundle\") pod \"horizon-6b9f68988b-b4q58\" (UID: \"73859337-4ff6-4ada-bc9b-a29b6b1fc478\") " pod="openstack/horizon-6b9f68988b-b4q58" Sep 30 10:43:15 crc kubenswrapper[4730]: I0930 10:43:15.291578 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/73859337-4ff6-4ada-bc9b-a29b6b1fc478-scripts\") pod \"horizon-6b9f68988b-b4q58\" (UID: \"73859337-4ff6-4ada-bc9b-a29b6b1fc478\") " pod="openstack/horizon-6b9f68988b-b4q58" Sep 30 10:43:15 crc kubenswrapper[4730]: I0930 10:43:15.291892 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/73859337-4ff6-4ada-bc9b-a29b6b1fc478-logs\") pod \"horizon-6b9f68988b-b4q58\" (UID: \"73859337-4ff6-4ada-bc9b-a29b6b1fc478\") " pod="openstack/horizon-6b9f68988b-b4q58" Sep 30 10:43:15 crc kubenswrapper[4730]: I0930 10:43:15.294066 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/73859337-4ff6-4ada-bc9b-a29b6b1fc478-config-data\") pod \"horizon-6b9f68988b-b4q58\" (UID: \"73859337-4ff6-4ada-bc9b-a29b6b1fc478\") " 
pod="openstack/horizon-6b9f68988b-b4q58" Sep 30 10:43:15 crc kubenswrapper[4730]: I0930 10:43:15.299662 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/73859337-4ff6-4ada-bc9b-a29b6b1fc478-horizon-secret-key\") pod \"horizon-6b9f68988b-b4q58\" (UID: \"73859337-4ff6-4ada-bc9b-a29b6b1fc478\") " pod="openstack/horizon-6b9f68988b-b4q58" Sep 30 10:43:15 crc kubenswrapper[4730]: I0930 10:43:15.299700 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/73859337-4ff6-4ada-bc9b-a29b6b1fc478-combined-ca-bundle\") pod \"horizon-6b9f68988b-b4q58\" (UID: \"73859337-4ff6-4ada-bc9b-a29b6b1fc478\") " pod="openstack/horizon-6b9f68988b-b4q58" Sep 30 10:43:15 crc kubenswrapper[4730]: I0930 10:43:15.300007 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/73859337-4ff6-4ada-bc9b-a29b6b1fc478-horizon-tls-certs\") pod \"horizon-6b9f68988b-b4q58\" (UID: \"73859337-4ff6-4ada-bc9b-a29b6b1fc478\") " pod="openstack/horizon-6b9f68988b-b4q58" Sep 30 10:43:15 crc kubenswrapper[4730]: I0930 10:43:15.314471 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qthnm\" (UniqueName: \"kubernetes.io/projected/73859337-4ff6-4ada-bc9b-a29b6b1fc478-kube-api-access-qthnm\") pod \"horizon-6b9f68988b-b4q58\" (UID: \"73859337-4ff6-4ada-bc9b-a29b6b1fc478\") " pod="openstack/horizon-6b9f68988b-b4q58" Sep 30 10:43:15 crc kubenswrapper[4730]: I0930 10:43:15.382456 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-78fdfbc44d-gv74b" Sep 30 10:43:15 crc kubenswrapper[4730]: I0930 10:43:15.431764 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-6b9f68988b-b4q58" Sep 30 10:43:15 crc kubenswrapper[4730]: I0930 10:43:15.659244 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-volume2-0" event={"ID":"12c02e5f-fb4a-46e7-8772-07bbe148bdcd","Type":"ContainerStarted","Data":"47033dc9e08095f6959c832172d894992e664e31fa516dca8fe61438b123f0dd"} Sep 30 10:43:15 crc kubenswrapper[4730]: I0930 10:43:15.703296 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"27e55446-db32-4ddf-a826-cf187d660f08","Type":"ContainerStarted","Data":"5986f17bc827a1f8e79349572fdb2c5a6a01bb48f5bb85ed7413215716e3c43f"} Sep 30 10:43:15 crc kubenswrapper[4730]: I0930 10:43:15.728270 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-volume-volume2-0" podStartSLOduration=3.884082206 podStartE2EDuration="4.728247133s" podCreationTimestamp="2025-09-30 10:43:11 +0000 UTC" firstStartedPulling="2025-09-30 10:43:12.866596432 +0000 UTC m=+3237.199856425" lastFinishedPulling="2025-09-30 10:43:13.710761359 +0000 UTC m=+3238.044021352" observedRunningTime="2025-09-30 10:43:15.698229863 +0000 UTC m=+3240.031489856" watchObservedRunningTime="2025-09-30 10:43:15.728247133 +0000 UTC m=+3240.061507116" Sep 30 10:43:15 crc kubenswrapper[4730]: I0930 10:43:15.760449 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-volume1-0" event={"ID":"ad078102-347b-4f85-8fa5-f83cbf35c06a","Type":"ContainerStarted","Data":"3292c4e2fdc172754fabf98e1f1786f3f602a7e002fc5f8b5d134637ccdc6a91"} Sep 30 10:43:15 crc kubenswrapper[4730]: I0930 10:43:15.803063 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"21951d2d-728f-412f-9af6-9a5d6b4e40b3","Type":"ContainerStarted","Data":"42c371bbe3bd602302cb494e25cbca4e7f96843d7ba1aa2eee8778c5a500dc6c"} Sep 30 10:43:15 crc kubenswrapper[4730]: I0930 10:43:15.826156 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-volume-volume1-0" podStartSLOduration=3.9284674280000003 podStartE2EDuration="4.826134365s" podCreationTimestamp="2025-09-30 10:43:11 +0000 UTC" firstStartedPulling="2025-09-30 10:43:12.84573669 +0000 UTC m=+3237.178996683" lastFinishedPulling="2025-09-30 10:43:13.743403627 +0000 UTC m=+3238.076663620" observedRunningTime="2025-09-30 10:43:15.82133602 +0000 UTC m=+3240.154596033" watchObservedRunningTime="2025-09-30 10:43:15.826134365 +0000 UTC m=+3240.159394358" Sep 30 10:43:16 crc kubenswrapper[4730]: I0930 10:43:16.017834 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-78fdfbc44d-gv74b"] Sep 30 10:43:16 crc kubenswrapper[4730]: I0930 10:43:16.303992 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-6b9f68988b-b4q58"] Sep 30 10:43:16 crc kubenswrapper[4730]: W0930 10:43:16.311792 4730 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod73859337_4ff6_4ada_bc9b_a29b6b1fc478.slice/crio-096e93818f8a11660ad4f96ee2954fe89b49ece6479c19b7760d0ac4a9a1db01 WatchSource:0}: Error finding container 096e93818f8a11660ad4f96ee2954fe89b49ece6479c19b7760d0ac4a9a1db01: Status 404 returned error can't find the container with id 096e93818f8a11660ad4f96ee2954fe89b49ece6479c19b7760d0ac4a9a1db01 Sep 30 10:43:16 crc kubenswrapper[4730]: I0930 10:43:16.740915 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" 
status="unhealthy" pod="openstack/cinder-backup-0" Sep 30 10:43:16 crc kubenswrapper[4730]: I0930 10:43:16.749861 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-volume-volume1-0" Sep 30 10:43:16 crc kubenswrapper[4730]: I0930 10:43:16.826559 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-78fdfbc44d-gv74b" event={"ID":"4e4cdc0d-7159-473c-b832-8628f59a1ebb","Type":"ContainerStarted","Data":"16183743c9f9a57fc8a190966203be7e22dc811b57b6c2fb27ac437607a33a20"} Sep 30 10:43:16 crc kubenswrapper[4730]: I0930 10:43:16.837330 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"21951d2d-728f-412f-9af6-9a5d6b4e40b3","Type":"ContainerStarted","Data":"323d656146faf1e80ddd114f615664fe8e15aa507584adf6b47d162a63bcb66a"} Sep 30 10:43:16 crc kubenswrapper[4730]: I0930 10:43:16.837526 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="21951d2d-728f-412f-9af6-9a5d6b4e40b3" containerName="glance-log" containerID="cri-o://42c371bbe3bd602302cb494e25cbca4e7f96843d7ba1aa2eee8778c5a500dc6c" gracePeriod=30 Sep 30 10:43:16 crc kubenswrapper[4730]: I0930 10:43:16.838260 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="21951d2d-728f-412f-9af6-9a5d6b4e40b3" containerName="glance-httpd" containerID="cri-o://323d656146faf1e80ddd114f615664fe8e15aa507584adf6b47d162a63bcb66a" gracePeriod=30 Sep 30 10:43:16 crc kubenswrapper[4730]: I0930 10:43:16.843148 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6b9f68988b-b4q58" event={"ID":"73859337-4ff6-4ada-bc9b-a29b6b1fc478","Type":"ContainerStarted","Data":"096e93818f8a11660ad4f96ee2954fe89b49ece6479c19b7760d0ac4a9a1db01"} Sep 30 10:43:16 crc kubenswrapper[4730]: I0930 10:43:16.866518 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=4.866502989 podStartE2EDuration="4.866502989s" podCreationTimestamp="2025-09-30 10:43:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 10:43:16.862181665 +0000 UTC m=+3241.195441668" watchObservedRunningTime="2025-09-30 10:43:16.866502989 +0000 UTC m=+3241.199762982" Sep 30 10:43:16 crc kubenswrapper[4730]: I0930 10:43:16.872048 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"27e55446-db32-4ddf-a826-cf187d660f08","Type":"ContainerStarted","Data":"8d47f620f7af63eb27799b49cc82a52bdc9691fa8795f5679a8e960fddba8cf8"} Sep 30 10:43:16 crc kubenswrapper[4730]: I0930 10:43:16.872111 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="27e55446-db32-4ddf-a826-cf187d660f08" containerName="glance-log" containerID="cri-o://5986f17bc827a1f8e79349572fdb2c5a6a01bb48f5bb85ed7413215716e3c43f" gracePeriod=30 Sep 30 10:43:16 crc kubenswrapper[4730]: I0930 10:43:16.872258 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="27e55446-db32-4ddf-a826-cf187d660f08" containerName="glance-httpd" containerID="cri-o://8d47f620f7af63eb27799b49cc82a52bdc9691fa8795f5679a8e960fddba8cf8" gracePeriod=30 Sep 30 10:43:16 crc kubenswrapper[4730]: I0930 
10:43:16.897604 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=4.897587696 podStartE2EDuration="4.897587696s" podCreationTimestamp="2025-09-30 10:43:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 10:43:16.896067076 +0000 UTC m=+3241.229327069" watchObservedRunningTime="2025-09-30 10:43:16.897587696 +0000 UTC m=+3241.230847699" Sep 30 10:43:17 crc kubenswrapper[4730]: I0930 10:43:17.064573 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-volume-volume2-0" Sep 30 10:43:17 crc kubenswrapper[4730]: I0930 10:43:17.654064 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 30 10:43:17 crc kubenswrapper[4730]: I0930 10:43:17.781550 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21951d2d-728f-412f-9af6-9a5d6b4e40b3-combined-ca-bundle\") pod \"21951d2d-728f-412f-9af6-9a5d6b4e40b3\" (UID: \"21951d2d-728f-412f-9af6-9a5d6b4e40b3\") " Sep 30 10:43:17 crc kubenswrapper[4730]: I0930 10:43:17.781733 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5m7vk\" (UniqueName: \"kubernetes.io/projected/21951d2d-728f-412f-9af6-9a5d6b4e40b3-kube-api-access-5m7vk\") pod \"21951d2d-728f-412f-9af6-9a5d6b4e40b3\" (UID: \"21951d2d-728f-412f-9af6-9a5d6b4e40b3\") " Sep 30 10:43:17 crc kubenswrapper[4730]: I0930 10:43:17.781773 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/21951d2d-728f-412f-9af6-9a5d6b4e40b3-public-tls-certs\") pod \"21951d2d-728f-412f-9af6-9a5d6b4e40b3\" (UID: \"21951d2d-728f-412f-9af6-9a5d6b4e40b3\") " Sep 30 10:43:17 crc kubenswrapper[4730]: I0930 10:43:17.781848 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/21951d2d-728f-412f-9af6-9a5d6b4e40b3-httpd-run\") pod \"21951d2d-728f-412f-9af6-9a5d6b4e40b3\" (UID: \"21951d2d-728f-412f-9af6-9a5d6b4e40b3\") " Sep 30 10:43:17 crc kubenswrapper[4730]: I0930 10:43:17.781880 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/21951d2d-728f-412f-9af6-9a5d6b4e40b3-ceph\") pod \"21951d2d-728f-412f-9af6-9a5d6b4e40b3\" (UID: \"21951d2d-728f-412f-9af6-9a5d6b4e40b3\") " Sep 30 10:43:17 crc kubenswrapper[4730]: I0930 10:43:17.781908 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/21951d2d-728f-412f-9af6-9a5d6b4e40b3-config-data\") pod \"21951d2d-728f-412f-9af6-9a5d6b4e40b3\" (UID: \"21951d2d-728f-412f-9af6-9a5d6b4e40b3\") " Sep 30 10:43:17 crc kubenswrapper[4730]: I0930 10:43:17.781923 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"21951d2d-728f-412f-9af6-9a5d6b4e40b3\" (UID: \"21951d2d-728f-412f-9af6-9a5d6b4e40b3\") " Sep 30 10:43:17 crc kubenswrapper[4730]: I0930 10:43:17.781957 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/21951d2d-728f-412f-9af6-9a5d6b4e40b3-logs\") 
pod \"21951d2d-728f-412f-9af6-9a5d6b4e40b3\" (UID: \"21951d2d-728f-412f-9af6-9a5d6b4e40b3\") " Sep 30 10:43:17 crc kubenswrapper[4730]: I0930 10:43:17.781990 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/21951d2d-728f-412f-9af6-9a5d6b4e40b3-scripts\") pod \"21951d2d-728f-412f-9af6-9a5d6b4e40b3\" (UID: \"21951d2d-728f-412f-9af6-9a5d6b4e40b3\") " Sep 30 10:43:17 crc kubenswrapper[4730]: I0930 10:43:17.782481 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/21951d2d-728f-412f-9af6-9a5d6b4e40b3-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "21951d2d-728f-412f-9af6-9a5d6b4e40b3" (UID: "21951d2d-728f-412f-9af6-9a5d6b4e40b3"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 10:43:17 crc kubenswrapper[4730]: I0930 10:43:17.782972 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/21951d2d-728f-412f-9af6-9a5d6b4e40b3-logs" (OuterVolumeSpecName: "logs") pod "21951d2d-728f-412f-9af6-9a5d6b4e40b3" (UID: "21951d2d-728f-412f-9af6-9a5d6b4e40b3"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 10:43:17 crc kubenswrapper[4730]: I0930 10:43:17.788495 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/21951d2d-728f-412f-9af6-9a5d6b4e40b3-kube-api-access-5m7vk" (OuterVolumeSpecName: "kube-api-access-5m7vk") pod "21951d2d-728f-412f-9af6-9a5d6b4e40b3" (UID: "21951d2d-728f-412f-9af6-9a5d6b4e40b3"). InnerVolumeSpecName "kube-api-access-5m7vk". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:43:17 crc kubenswrapper[4730]: I0930 10:43:17.789086 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/21951d2d-728f-412f-9af6-9a5d6b4e40b3-ceph" (OuterVolumeSpecName: "ceph") pod "21951d2d-728f-412f-9af6-9a5d6b4e40b3" (UID: "21951d2d-728f-412f-9af6-9a5d6b4e40b3"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:43:17 crc kubenswrapper[4730]: I0930 10:43:17.796817 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/21951d2d-728f-412f-9af6-9a5d6b4e40b3-scripts" (OuterVolumeSpecName: "scripts") pod "21951d2d-728f-412f-9af6-9a5d6b4e40b3" (UID: "21951d2d-728f-412f-9af6-9a5d6b4e40b3"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:43:17 crc kubenswrapper[4730]: I0930 10:43:17.797088 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage09-crc" (OuterVolumeSpecName: "glance") pod "21951d2d-728f-412f-9af6-9a5d6b4e40b3" (UID: "21951d2d-728f-412f-9af6-9a5d6b4e40b3"). InnerVolumeSpecName "local-storage09-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Sep 30 10:43:17 crc kubenswrapper[4730]: I0930 10:43:17.834103 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/21951d2d-728f-412f-9af6-9a5d6b4e40b3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "21951d2d-728f-412f-9af6-9a5d6b4e40b3" (UID: "21951d2d-728f-412f-9af6-9a5d6b4e40b3"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:43:17 crc kubenswrapper[4730]: I0930 10:43:17.876129 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/21951d2d-728f-412f-9af6-9a5d6b4e40b3-config-data" (OuterVolumeSpecName: "config-data") pod "21951d2d-728f-412f-9af6-9a5d6b4e40b3" (UID: "21951d2d-728f-412f-9af6-9a5d6b4e40b3"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:43:17 crc kubenswrapper[4730]: I0930 10:43:17.885054 4730 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21951d2d-728f-412f-9af6-9a5d6b4e40b3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 10:43:17 crc kubenswrapper[4730]: I0930 10:43:17.885084 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5m7vk\" (UniqueName: \"kubernetes.io/projected/21951d2d-728f-412f-9af6-9a5d6b4e40b3-kube-api-access-5m7vk\") on node \"crc\" DevicePath \"\"" Sep 30 10:43:17 crc kubenswrapper[4730]: I0930 10:43:17.885099 4730 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/21951d2d-728f-412f-9af6-9a5d6b4e40b3-httpd-run\") on node \"crc\" DevicePath \"\"" Sep 30 10:43:17 crc kubenswrapper[4730]: I0930 10:43:17.885111 4730 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/21951d2d-728f-412f-9af6-9a5d6b4e40b3-ceph\") on node \"crc\" DevicePath \"\"" Sep 30 10:43:17 crc kubenswrapper[4730]: I0930 10:43:17.885123 4730 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/21951d2d-728f-412f-9af6-9a5d6b4e40b3-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 10:43:17 crc kubenswrapper[4730]: I0930 10:43:17.885150 4730 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" " Sep 30 10:43:17 crc kubenswrapper[4730]: I0930 10:43:17.885163 4730 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/21951d2d-728f-412f-9af6-9a5d6b4e40b3-logs\") on node \"crc\" DevicePath \"\"" Sep 30 10:43:17 crc kubenswrapper[4730]: I0930 10:43:17.885173 4730 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/21951d2d-728f-412f-9af6-9a5d6b4e40b3-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 10:43:17 crc kubenswrapper[4730]: I0930 10:43:17.887748 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/21951d2d-728f-412f-9af6-9a5d6b4e40b3-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "21951d2d-728f-412f-9af6-9a5d6b4e40b3" (UID: "21951d2d-728f-412f-9af6-9a5d6b4e40b3"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:43:17 crc kubenswrapper[4730]: I0930 10:43:17.894826 4730 generic.go:334] "Generic (PLEG): container finished" podID="21951d2d-728f-412f-9af6-9a5d6b4e40b3" containerID="323d656146faf1e80ddd114f615664fe8e15aa507584adf6b47d162a63bcb66a" exitCode=0 Sep 30 10:43:17 crc kubenswrapper[4730]: I0930 10:43:17.894864 4730 generic.go:334] "Generic (PLEG): container finished" podID="21951d2d-728f-412f-9af6-9a5d6b4e40b3" containerID="42c371bbe3bd602302cb494e25cbca4e7f96843d7ba1aa2eee8778c5a500dc6c" exitCode=143 Sep 30 10:43:17 crc kubenswrapper[4730]: I0930 10:43:17.894984 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 30 10:43:17 crc kubenswrapper[4730]: I0930 10:43:17.895298 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"21951d2d-728f-412f-9af6-9a5d6b4e40b3","Type":"ContainerDied","Data":"323d656146faf1e80ddd114f615664fe8e15aa507584adf6b47d162a63bcb66a"} Sep 30 10:43:17 crc kubenswrapper[4730]: I0930 10:43:17.895350 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"21951d2d-728f-412f-9af6-9a5d6b4e40b3","Type":"ContainerDied","Data":"42c371bbe3bd602302cb494e25cbca4e7f96843d7ba1aa2eee8778c5a500dc6c"} Sep 30 10:43:17 crc kubenswrapper[4730]: I0930 10:43:17.895363 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"21951d2d-728f-412f-9af6-9a5d6b4e40b3","Type":"ContainerDied","Data":"f17cf83323599ef7d2fd338bbeb107fa97ba334d6ef2ebdf5e5d9f883a4b2c8c"} Sep 30 10:43:17 crc kubenswrapper[4730]: I0930 10:43:17.895382 4730 scope.go:117] "RemoveContainer" containerID="323d656146faf1e80ddd114f615664fe8e15aa507584adf6b47d162a63bcb66a" Sep 30 10:43:17 crc kubenswrapper[4730]: I0930 10:43:17.920353 4730 generic.go:334] "Generic (PLEG): container finished" podID="27e55446-db32-4ddf-a826-cf187d660f08" containerID="8d47f620f7af63eb27799b49cc82a52bdc9691fa8795f5679a8e960fddba8cf8" exitCode=0 Sep 30 10:43:17 crc kubenswrapper[4730]: I0930 10:43:17.920391 4730 generic.go:334] "Generic (PLEG): container finished" podID="27e55446-db32-4ddf-a826-cf187d660f08" containerID="5986f17bc827a1f8e79349572fdb2c5a6a01bb48f5bb85ed7413215716e3c43f" exitCode=143 Sep 30 10:43:17 crc kubenswrapper[4730]: I0930 10:43:17.921006 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"27e55446-db32-4ddf-a826-cf187d660f08","Type":"ContainerDied","Data":"8d47f620f7af63eb27799b49cc82a52bdc9691fa8795f5679a8e960fddba8cf8"} Sep 30 10:43:17 crc kubenswrapper[4730]: I0930 10:43:17.921051 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"27e55446-db32-4ddf-a826-cf187d660f08","Type":"ContainerDied","Data":"5986f17bc827a1f8e79349572fdb2c5a6a01bb48f5bb85ed7413215716e3c43f"} Sep 30 10:43:17 crc kubenswrapper[4730]: I0930 10:43:17.939442 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 30 10:43:17 crc kubenswrapper[4730]: I0930 10:43:17.940155 4730 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage09-crc" (UniqueName: "kubernetes.io/local-volume/local-storage09-crc") on node "crc" Sep 30 10:43:17 crc kubenswrapper[4730]: I0930 10:43:17.940812 4730 scope.go:117] "RemoveContainer" 
containerID="42c371bbe3bd602302cb494e25cbca4e7f96843d7ba1aa2eee8778c5a500dc6c" Sep 30 10:43:17 crc kubenswrapper[4730]: I0930 10:43:17.969708 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 30 10:43:17 crc kubenswrapper[4730]: I0930 10:43:17.981055 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Sep 30 10:43:17 crc kubenswrapper[4730]: E0930 10:43:17.981573 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="21951d2d-728f-412f-9af6-9a5d6b4e40b3" containerName="glance-httpd" Sep 30 10:43:17 crc kubenswrapper[4730]: I0930 10:43:17.981594 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="21951d2d-728f-412f-9af6-9a5d6b4e40b3" containerName="glance-httpd" Sep 30 10:43:17 crc kubenswrapper[4730]: E0930 10:43:17.981653 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="21951d2d-728f-412f-9af6-9a5d6b4e40b3" containerName="glance-log" Sep 30 10:43:17 crc kubenswrapper[4730]: I0930 10:43:17.981667 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="21951d2d-728f-412f-9af6-9a5d6b4e40b3" containerName="glance-log" Sep 30 10:43:17 crc kubenswrapper[4730]: I0930 10:43:17.981890 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="21951d2d-728f-412f-9af6-9a5d6b4e40b3" containerName="glance-httpd" Sep 30 10:43:17 crc kubenswrapper[4730]: I0930 10:43:17.981908 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="21951d2d-728f-412f-9af6-9a5d6b4e40b3" containerName="glance-log" Sep 30 10:43:17 crc kubenswrapper[4730]: I0930 10:43:17.983331 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 30 10:43:17 crc kubenswrapper[4730]: I0930 10:43:17.987020 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Sep 30 10:43:17 crc kubenswrapper[4730]: I0930 10:43:17.987186 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Sep 30 10:43:17 crc kubenswrapper[4730]: I0930 10:43:17.996432 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 30 10:43:18 crc kubenswrapper[4730]: I0930 10:43:18.017570 4730 reconciler_common.go:293] "Volume detached for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" DevicePath \"\"" Sep 30 10:43:18 crc kubenswrapper[4730]: I0930 10:43:18.017604 4730 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/21951d2d-728f-412f-9af6-9a5d6b4e40b3-public-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 10:43:18 crc kubenswrapper[4730]: I0930 10:43:18.077241 4730 scope.go:117] "RemoveContainer" containerID="323d656146faf1e80ddd114f615664fe8e15aa507584adf6b47d162a63bcb66a" Sep 30 10:43:18 crc kubenswrapper[4730]: E0930 10:43:18.082727 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"323d656146faf1e80ddd114f615664fe8e15aa507584adf6b47d162a63bcb66a\": container with ID starting with 323d656146faf1e80ddd114f615664fe8e15aa507584adf6b47d162a63bcb66a not found: ID does not exist" containerID="323d656146faf1e80ddd114f615664fe8e15aa507584adf6b47d162a63bcb66a" Sep 30 10:43:18 crc kubenswrapper[4730]: I0930 10:43:18.082778 4730 pod_container_deletor.go:53] 
"DeleteContainer returned error" containerID={"Type":"cri-o","ID":"323d656146faf1e80ddd114f615664fe8e15aa507584adf6b47d162a63bcb66a"} err="failed to get container status \"323d656146faf1e80ddd114f615664fe8e15aa507584adf6b47d162a63bcb66a\": rpc error: code = NotFound desc = could not find container \"323d656146faf1e80ddd114f615664fe8e15aa507584adf6b47d162a63bcb66a\": container with ID starting with 323d656146faf1e80ddd114f615664fe8e15aa507584adf6b47d162a63bcb66a not found: ID does not exist" Sep 30 10:43:18 crc kubenswrapper[4730]: I0930 10:43:18.082808 4730 scope.go:117] "RemoveContainer" containerID="42c371bbe3bd602302cb494e25cbca4e7f96843d7ba1aa2eee8778c5a500dc6c" Sep 30 10:43:18 crc kubenswrapper[4730]: E0930 10:43:18.089441 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"42c371bbe3bd602302cb494e25cbca4e7f96843d7ba1aa2eee8778c5a500dc6c\": container with ID starting with 42c371bbe3bd602302cb494e25cbca4e7f96843d7ba1aa2eee8778c5a500dc6c not found: ID does not exist" containerID="42c371bbe3bd602302cb494e25cbca4e7f96843d7ba1aa2eee8778c5a500dc6c" Sep 30 10:43:18 crc kubenswrapper[4730]: I0930 10:43:18.089479 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"42c371bbe3bd602302cb494e25cbca4e7f96843d7ba1aa2eee8778c5a500dc6c"} err="failed to get container status \"42c371bbe3bd602302cb494e25cbca4e7f96843d7ba1aa2eee8778c5a500dc6c\": rpc error: code = NotFound desc = could not find container \"42c371bbe3bd602302cb494e25cbca4e7f96843d7ba1aa2eee8778c5a500dc6c\": container with ID starting with 42c371bbe3bd602302cb494e25cbca4e7f96843d7ba1aa2eee8778c5a500dc6c not found: ID does not exist" Sep 30 10:43:18 crc kubenswrapper[4730]: I0930 10:43:18.089521 4730 scope.go:117] "RemoveContainer" containerID="323d656146faf1e80ddd114f615664fe8e15aa507584adf6b47d162a63bcb66a" Sep 30 10:43:18 crc kubenswrapper[4730]: I0930 10:43:18.090000 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"323d656146faf1e80ddd114f615664fe8e15aa507584adf6b47d162a63bcb66a"} err="failed to get container status \"323d656146faf1e80ddd114f615664fe8e15aa507584adf6b47d162a63bcb66a\": rpc error: code = NotFound desc = could not find container \"323d656146faf1e80ddd114f615664fe8e15aa507584adf6b47d162a63bcb66a\": container with ID starting with 323d656146faf1e80ddd114f615664fe8e15aa507584adf6b47d162a63bcb66a not found: ID does not exist" Sep 30 10:43:18 crc kubenswrapper[4730]: I0930 10:43:18.090015 4730 scope.go:117] "RemoveContainer" containerID="42c371bbe3bd602302cb494e25cbca4e7f96843d7ba1aa2eee8778c5a500dc6c" Sep 30 10:43:18 crc kubenswrapper[4730]: I0930 10:43:18.090301 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"42c371bbe3bd602302cb494e25cbca4e7f96843d7ba1aa2eee8778c5a500dc6c"} err="failed to get container status \"42c371bbe3bd602302cb494e25cbca4e7f96843d7ba1aa2eee8778c5a500dc6c\": rpc error: code = NotFound desc = could not find container \"42c371bbe3bd602302cb494e25cbca4e7f96843d7ba1aa2eee8778c5a500dc6c\": container with ID starting with 42c371bbe3bd602302cb494e25cbca4e7f96843d7ba1aa2eee8778c5a500dc6c not found: ID does not exist" Sep 30 10:43:18 crc kubenswrapper[4730]: I0930 10:43:18.227185 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod 
\"glance-default-external-api-0\" (UID: \"e5bde710-0351-45d0-acb8-990719f9ba34\") " pod="openstack/glance-default-external-api-0" Sep 30 10:43:18 crc kubenswrapper[4730]: I0930 10:43:18.227300 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/e5bde710-0351-45d0-acb8-990719f9ba34-ceph\") pod \"glance-default-external-api-0\" (UID: \"e5bde710-0351-45d0-acb8-990719f9ba34\") " pod="openstack/glance-default-external-api-0" Sep 30 10:43:18 crc kubenswrapper[4730]: I0930 10:43:18.231856 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e5bde710-0351-45d0-acb8-990719f9ba34-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"e5bde710-0351-45d0-acb8-990719f9ba34\") " pod="openstack/glance-default-external-api-0" Sep 30 10:43:18 crc kubenswrapper[4730]: I0930 10:43:18.231929 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/e5bde710-0351-45d0-acb8-990719f9ba34-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"e5bde710-0351-45d0-acb8-990719f9ba34\") " pod="openstack/glance-default-external-api-0" Sep 30 10:43:18 crc kubenswrapper[4730]: I0930 10:43:18.232054 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5bde710-0351-45d0-acb8-990719f9ba34-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"e5bde710-0351-45d0-acb8-990719f9ba34\") " pod="openstack/glance-default-external-api-0" Sep 30 10:43:18 crc kubenswrapper[4730]: I0930 10:43:18.232125 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-97mnx\" (UniqueName: \"kubernetes.io/projected/e5bde710-0351-45d0-acb8-990719f9ba34-kube-api-access-97mnx\") pod \"glance-default-external-api-0\" (UID: \"e5bde710-0351-45d0-acb8-990719f9ba34\") " pod="openstack/glance-default-external-api-0" Sep 30 10:43:18 crc kubenswrapper[4730]: I0930 10:43:18.232249 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e5bde710-0351-45d0-acb8-990719f9ba34-config-data\") pod \"glance-default-external-api-0\" (UID: \"e5bde710-0351-45d0-acb8-990719f9ba34\") " pod="openstack/glance-default-external-api-0" Sep 30 10:43:18 crc kubenswrapper[4730]: I0930 10:43:18.232273 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e5bde710-0351-45d0-acb8-990719f9ba34-logs\") pod \"glance-default-external-api-0\" (UID: \"e5bde710-0351-45d0-acb8-990719f9ba34\") " pod="openstack/glance-default-external-api-0" Sep 30 10:43:18 crc kubenswrapper[4730]: I0930 10:43:18.232293 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e5bde710-0351-45d0-acb8-990719f9ba34-scripts\") pod \"glance-default-external-api-0\" (UID: \"e5bde710-0351-45d0-acb8-990719f9ba34\") " pod="openstack/glance-default-external-api-0" Sep 30 10:43:18 crc kubenswrapper[4730]: I0930 10:43:18.307073 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 30 10:43:18 crc kubenswrapper[4730]: I0930 10:43:18.337018 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e5bde710-0351-45d0-acb8-990719f9ba34-config-data\") pod \"glance-default-external-api-0\" (UID: \"e5bde710-0351-45d0-acb8-990719f9ba34\") " pod="openstack/glance-default-external-api-0" Sep 30 10:43:18 crc kubenswrapper[4730]: I0930 10:43:18.337068 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e5bde710-0351-45d0-acb8-990719f9ba34-logs\") pod \"glance-default-external-api-0\" (UID: \"e5bde710-0351-45d0-acb8-990719f9ba34\") " pod="openstack/glance-default-external-api-0" Sep 30 10:43:18 crc kubenswrapper[4730]: I0930 10:43:18.337094 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e5bde710-0351-45d0-acb8-990719f9ba34-scripts\") pod \"glance-default-external-api-0\" (UID: \"e5bde710-0351-45d0-acb8-990719f9ba34\") " pod="openstack/glance-default-external-api-0" Sep 30 10:43:18 crc kubenswrapper[4730]: I0930 10:43:18.337204 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"e5bde710-0351-45d0-acb8-990719f9ba34\") " pod="openstack/glance-default-external-api-0" Sep 30 10:43:18 crc kubenswrapper[4730]: I0930 10:43:18.337275 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/e5bde710-0351-45d0-acb8-990719f9ba34-ceph\") pod \"glance-default-external-api-0\" (UID: \"e5bde710-0351-45d0-acb8-990719f9ba34\") " pod="openstack/glance-default-external-api-0" Sep 30 10:43:18 crc kubenswrapper[4730]: I0930 10:43:18.337366 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e5bde710-0351-45d0-acb8-990719f9ba34-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"e5bde710-0351-45d0-acb8-990719f9ba34\") " pod="openstack/glance-default-external-api-0" Sep 30 10:43:18 crc kubenswrapper[4730]: I0930 10:43:18.337399 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/e5bde710-0351-45d0-acb8-990719f9ba34-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"e5bde710-0351-45d0-acb8-990719f9ba34\") " pod="openstack/glance-default-external-api-0" Sep 30 10:43:18 crc kubenswrapper[4730]: I0930 10:43:18.337438 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5bde710-0351-45d0-acb8-990719f9ba34-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"e5bde710-0351-45d0-acb8-990719f9ba34\") " pod="openstack/glance-default-external-api-0" Sep 30 10:43:18 crc kubenswrapper[4730]: I0930 10:43:18.337484 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-97mnx\" (UniqueName: \"kubernetes.io/projected/e5bde710-0351-45d0-acb8-990719f9ba34-kube-api-access-97mnx\") pod \"glance-default-external-api-0\" (UID: \"e5bde710-0351-45d0-acb8-990719f9ba34\") " pod="openstack/glance-default-external-api-0" Sep 30 10:43:18 crc kubenswrapper[4730]: 
I0930 10:43:18.337644 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e5bde710-0351-45d0-acb8-990719f9ba34-logs\") pod \"glance-default-external-api-0\" (UID: \"e5bde710-0351-45d0-acb8-990719f9ba34\") " pod="openstack/glance-default-external-api-0" Sep 30 10:43:18 crc kubenswrapper[4730]: I0930 10:43:18.341955 4730 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"e5bde710-0351-45d0-acb8-990719f9ba34\") device mount path \"/mnt/openstack/pv09\"" pod="openstack/glance-default-external-api-0" Sep 30 10:43:18 crc kubenswrapper[4730]: I0930 10:43:18.357458 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/e5bde710-0351-45d0-acb8-990719f9ba34-ceph\") pod \"glance-default-external-api-0\" (UID: \"e5bde710-0351-45d0-acb8-990719f9ba34\") " pod="openstack/glance-default-external-api-0" Sep 30 10:43:18 crc kubenswrapper[4730]: I0930 10:43:18.357778 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e5bde710-0351-45d0-acb8-990719f9ba34-config-data\") pod \"glance-default-external-api-0\" (UID: \"e5bde710-0351-45d0-acb8-990719f9ba34\") " pod="openstack/glance-default-external-api-0" Sep 30 10:43:18 crc kubenswrapper[4730]: I0930 10:43:18.357878 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e5bde710-0351-45d0-acb8-990719f9ba34-scripts\") pod \"glance-default-external-api-0\" (UID: \"e5bde710-0351-45d0-acb8-990719f9ba34\") " pod="openstack/glance-default-external-api-0" Sep 30 10:43:18 crc kubenswrapper[4730]: I0930 10:43:18.359029 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/e5bde710-0351-45d0-acb8-990719f9ba34-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"e5bde710-0351-45d0-acb8-990719f9ba34\") " pod="openstack/glance-default-external-api-0" Sep 30 10:43:18 crc kubenswrapper[4730]: I0930 10:43:18.366892 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-97mnx\" (UniqueName: \"kubernetes.io/projected/e5bde710-0351-45d0-acb8-990719f9ba34-kube-api-access-97mnx\") pod \"glance-default-external-api-0\" (UID: \"e5bde710-0351-45d0-acb8-990719f9ba34\") " pod="openstack/glance-default-external-api-0" Sep 30 10:43:18 crc kubenswrapper[4730]: I0930 10:43:18.369337 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5bde710-0351-45d0-acb8-990719f9ba34-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"e5bde710-0351-45d0-acb8-990719f9ba34\") " pod="openstack/glance-default-external-api-0" Sep 30 10:43:18 crc kubenswrapper[4730]: I0930 10:43:18.394366 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e5bde710-0351-45d0-acb8-990719f9ba34-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"e5bde710-0351-45d0-acb8-990719f9ba34\") " pod="openstack/glance-default-external-api-0" Sep 30 10:43:18 crc kubenswrapper[4730]: I0930 10:43:18.419150 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="21951d2d-728f-412f-9af6-9a5d6b4e40b3" 
path="/var/lib/kubelet/pods/21951d2d-728f-412f-9af6-9a5d6b4e40b3/volumes" Sep 30 10:43:18 crc kubenswrapper[4730]: I0930 10:43:18.436526 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"e5bde710-0351-45d0-acb8-990719f9ba34\") " pod="openstack/glance-default-external-api-0" Sep 30 10:43:18 crc kubenswrapper[4730]: I0930 10:43:18.440071 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/27e55446-db32-4ddf-a826-cf187d660f08-httpd-run\") pod \"27e55446-db32-4ddf-a826-cf187d660f08\" (UID: \"27e55446-db32-4ddf-a826-cf187d660f08\") " Sep 30 10:43:18 crc kubenswrapper[4730]: I0930 10:43:18.440106 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/27e55446-db32-4ddf-a826-cf187d660f08-combined-ca-bundle\") pod \"27e55446-db32-4ddf-a826-cf187d660f08\" (UID: \"27e55446-db32-4ddf-a826-cf187d660f08\") " Sep 30 10:43:18 crc kubenswrapper[4730]: I0930 10:43:18.440125 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/27e55446-db32-4ddf-a826-cf187d660f08-ceph\") pod \"27e55446-db32-4ddf-a826-cf187d660f08\" (UID: \"27e55446-db32-4ddf-a826-cf187d660f08\") " Sep 30 10:43:18 crc kubenswrapper[4730]: I0930 10:43:18.440795 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/27e55446-db32-4ddf-a826-cf187d660f08-internal-tls-certs\") pod \"27e55446-db32-4ddf-a826-cf187d660f08\" (UID: \"27e55446-db32-4ddf-a826-cf187d660f08\") " Sep 30 10:43:18 crc kubenswrapper[4730]: I0930 10:43:18.440847 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/27e55446-db32-4ddf-a826-cf187d660f08-logs\") pod \"27e55446-db32-4ddf-a826-cf187d660f08\" (UID: \"27e55446-db32-4ddf-a826-cf187d660f08\") " Sep 30 10:43:18 crc kubenswrapper[4730]: I0930 10:43:18.441035 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"27e55446-db32-4ddf-a826-cf187d660f08\" (UID: \"27e55446-db32-4ddf-a826-cf187d660f08\") " Sep 30 10:43:18 crc kubenswrapper[4730]: I0930 10:43:18.441054 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/27e55446-db32-4ddf-a826-cf187d660f08-scripts\") pod \"27e55446-db32-4ddf-a826-cf187d660f08\" (UID: \"27e55446-db32-4ddf-a826-cf187d660f08\") " Sep 30 10:43:18 crc kubenswrapper[4730]: I0930 10:43:18.441080 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/27e55446-db32-4ddf-a826-cf187d660f08-config-data\") pod \"27e55446-db32-4ddf-a826-cf187d660f08\" (UID: \"27e55446-db32-4ddf-a826-cf187d660f08\") " Sep 30 10:43:18 crc kubenswrapper[4730]: I0930 10:43:18.441403 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jvj25\" (UniqueName: \"kubernetes.io/projected/27e55446-db32-4ddf-a826-cf187d660f08-kube-api-access-jvj25\") pod \"27e55446-db32-4ddf-a826-cf187d660f08\" (UID: \"27e55446-db32-4ddf-a826-cf187d660f08\") " Sep 30 10:43:18 crc 
kubenswrapper[4730]: I0930 10:43:18.441400 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/27e55446-db32-4ddf-a826-cf187d660f08-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "27e55446-db32-4ddf-a826-cf187d660f08" (UID: "27e55446-db32-4ddf-a826-cf187d660f08"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 10:43:18 crc kubenswrapper[4730]: I0930 10:43:18.442259 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/27e55446-db32-4ddf-a826-cf187d660f08-logs" (OuterVolumeSpecName: "logs") pod "27e55446-db32-4ddf-a826-cf187d660f08" (UID: "27e55446-db32-4ddf-a826-cf187d660f08"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 10:43:18 crc kubenswrapper[4730]: I0930 10:43:18.442620 4730 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/27e55446-db32-4ddf-a826-cf187d660f08-httpd-run\") on node \"crc\" DevicePath \"\"" Sep 30 10:43:18 crc kubenswrapper[4730]: I0930 10:43:18.442638 4730 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/27e55446-db32-4ddf-a826-cf187d660f08-logs\") on node \"crc\" DevicePath \"\"" Sep 30 10:43:18 crc kubenswrapper[4730]: I0930 10:43:18.445015 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/27e55446-db32-4ddf-a826-cf187d660f08-ceph" (OuterVolumeSpecName: "ceph") pod "27e55446-db32-4ddf-a826-cf187d660f08" (UID: "27e55446-db32-4ddf-a826-cf187d660f08"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:43:18 crc kubenswrapper[4730]: I0930 10:43:18.445537 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage08-crc" (OuterVolumeSpecName: "glance") pod "27e55446-db32-4ddf-a826-cf187d660f08" (UID: "27e55446-db32-4ddf-a826-cf187d660f08"). InnerVolumeSpecName "local-storage08-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Sep 30 10:43:18 crc kubenswrapper[4730]: I0930 10:43:18.446348 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/27e55446-db32-4ddf-a826-cf187d660f08-scripts" (OuterVolumeSpecName: "scripts") pod "27e55446-db32-4ddf-a826-cf187d660f08" (UID: "27e55446-db32-4ddf-a826-cf187d660f08"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:43:18 crc kubenswrapper[4730]: I0930 10:43:18.452838 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/27e55446-db32-4ddf-a826-cf187d660f08-kube-api-access-jvj25" (OuterVolumeSpecName: "kube-api-access-jvj25") pod "27e55446-db32-4ddf-a826-cf187d660f08" (UID: "27e55446-db32-4ddf-a826-cf187d660f08"). InnerVolumeSpecName "kube-api-access-jvj25". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:43:18 crc kubenswrapper[4730]: I0930 10:43:18.510760 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/27e55446-db32-4ddf-a826-cf187d660f08-config-data" (OuterVolumeSpecName: "config-data") pod "27e55446-db32-4ddf-a826-cf187d660f08" (UID: "27e55446-db32-4ddf-a826-cf187d660f08"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:43:18 crc kubenswrapper[4730]: I0930 10:43:18.546578 4730 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" " Sep 30 10:43:18 crc kubenswrapper[4730]: I0930 10:43:18.546656 4730 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/27e55446-db32-4ddf-a826-cf187d660f08-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 10:43:18 crc kubenswrapper[4730]: I0930 10:43:18.546670 4730 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/27e55446-db32-4ddf-a826-cf187d660f08-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 10:43:18 crc kubenswrapper[4730]: I0930 10:43:18.546683 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jvj25\" (UniqueName: \"kubernetes.io/projected/27e55446-db32-4ddf-a826-cf187d660f08-kube-api-access-jvj25\") on node \"crc\" DevicePath \"\"" Sep 30 10:43:18 crc kubenswrapper[4730]: I0930 10:43:18.546696 4730 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/27e55446-db32-4ddf-a826-cf187d660f08-ceph\") on node \"crc\" DevicePath \"\"" Sep 30 10:43:18 crc kubenswrapper[4730]: I0930 10:43:18.561822 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/27e55446-db32-4ddf-a826-cf187d660f08-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "27e55446-db32-4ddf-a826-cf187d660f08" (UID: "27e55446-db32-4ddf-a826-cf187d660f08"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:43:18 crc kubenswrapper[4730]: I0930 10:43:18.576764 4730 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage08-crc" (UniqueName: "kubernetes.io/local-volume/local-storage08-crc") on node "crc" Sep 30 10:43:18 crc kubenswrapper[4730]: I0930 10:43:18.584093 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/27e55446-db32-4ddf-a826-cf187d660f08-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "27e55446-db32-4ddf-a826-cf187d660f08" (UID: "27e55446-db32-4ddf-a826-cf187d660f08"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:43:18 crc kubenswrapper[4730]: I0930 10:43:18.648458 4730 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/27e55446-db32-4ddf-a826-cf187d660f08-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 10:43:18 crc kubenswrapper[4730]: I0930 10:43:18.648487 4730 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/27e55446-db32-4ddf-a826-cf187d660f08-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 10:43:18 crc kubenswrapper[4730]: I0930 10:43:18.648497 4730 reconciler_common.go:293] "Volume detached for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" DevicePath \"\"" Sep 30 10:43:18 crc kubenswrapper[4730]: I0930 10:43:18.652062 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 30 10:43:18 crc kubenswrapper[4730]: I0930 10:43:18.965900 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"27e55446-db32-4ddf-a826-cf187d660f08","Type":"ContainerDied","Data":"4300f6f1b56846e244196babcf5559fed621c575738d8a27a1314cb24efd1f45"} Sep 30 10:43:18 crc kubenswrapper[4730]: I0930 10:43:18.966226 4730 scope.go:117] "RemoveContainer" containerID="8d47f620f7af63eb27799b49cc82a52bdc9691fa8795f5679a8e960fddba8cf8" Sep 30 10:43:18 crc kubenswrapper[4730]: I0930 10:43:18.966392 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 30 10:43:19 crc kubenswrapper[4730]: I0930 10:43:19.072073 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 30 10:43:19 crc kubenswrapper[4730]: I0930 10:43:19.088109 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 30 10:43:19 crc kubenswrapper[4730]: I0930 10:43:19.146626 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 30 10:43:19 crc kubenswrapper[4730]: E0930 10:43:19.147170 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="27e55446-db32-4ddf-a826-cf187d660f08" containerName="glance-httpd" Sep 30 10:43:19 crc kubenswrapper[4730]: I0930 10:43:19.147196 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="27e55446-db32-4ddf-a826-cf187d660f08" containerName="glance-httpd" Sep 30 10:43:19 crc kubenswrapper[4730]: E0930 10:43:19.147254 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="27e55446-db32-4ddf-a826-cf187d660f08" containerName="glance-log" Sep 30 10:43:19 crc kubenswrapper[4730]: I0930 10:43:19.147265 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="27e55446-db32-4ddf-a826-cf187d660f08" containerName="glance-log" Sep 30 10:43:19 crc kubenswrapper[4730]: I0930 10:43:19.147483 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="27e55446-db32-4ddf-a826-cf187d660f08" containerName="glance-log" Sep 30 10:43:19 crc kubenswrapper[4730]: I0930 10:43:19.147524 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="27e55446-db32-4ddf-a826-cf187d660f08" containerName="glance-httpd" Sep 30 10:43:19 crc kubenswrapper[4730]: I0930 10:43:19.148772 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 30 10:43:19 crc kubenswrapper[4730]: I0930 10:43:19.152333 4730 scope.go:117] "RemoveContainer" containerID="5986f17bc827a1f8e79349572fdb2c5a6a01bb48f5bb85ed7413215716e3c43f" Sep 30 10:43:19 crc kubenswrapper[4730]: I0930 10:43:19.152948 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Sep 30 10:43:19 crc kubenswrapper[4730]: I0930 10:43:19.153952 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Sep 30 10:43:19 crc kubenswrapper[4730]: I0930 10:43:19.157266 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 30 10:43:19 crc kubenswrapper[4730]: I0930 10:43:19.175062 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8532ec07-5ee5-40c9-82f5-df62806d03f5-logs\") pod \"glance-default-internal-api-0\" (UID: \"8532ec07-5ee5-40c9-82f5-df62806d03f5\") " pod="openstack/glance-default-internal-api-0" Sep 30 10:43:19 crc kubenswrapper[4730]: I0930 10:43:19.175182 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8532ec07-5ee5-40c9-82f5-df62806d03f5-scripts\") pod \"glance-default-internal-api-0\" (UID: \"8532ec07-5ee5-40c9-82f5-df62806d03f5\") " pod="openstack/glance-default-internal-api-0" Sep 30 10:43:19 crc kubenswrapper[4730]: I0930 10:43:19.175212 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8532ec07-5ee5-40c9-82f5-df62806d03f5-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"8532ec07-5ee5-40c9-82f5-df62806d03f5\") " pod="openstack/glance-default-internal-api-0" Sep 30 10:43:19 crc kubenswrapper[4730]: I0930 10:43:19.175265 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jkj4v\" (UniqueName: \"kubernetes.io/projected/8532ec07-5ee5-40c9-82f5-df62806d03f5-kube-api-access-jkj4v\") pod \"glance-default-internal-api-0\" (UID: \"8532ec07-5ee5-40c9-82f5-df62806d03f5\") " pod="openstack/glance-default-internal-api-0" Sep 30 10:43:19 crc kubenswrapper[4730]: I0930 10:43:19.175421 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8532ec07-5ee5-40c9-82f5-df62806d03f5-config-data\") pod \"glance-default-internal-api-0\" (UID: \"8532ec07-5ee5-40c9-82f5-df62806d03f5\") " pod="openstack/glance-default-internal-api-0" Sep 30 10:43:19 crc kubenswrapper[4730]: I0930 10:43:19.175670 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8532ec07-5ee5-40c9-82f5-df62806d03f5-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"8532ec07-5ee5-40c9-82f5-df62806d03f5\") " pod="openstack/glance-default-internal-api-0" Sep 30 10:43:19 crc kubenswrapper[4730]: I0930 10:43:19.175733 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/8532ec07-5ee5-40c9-82f5-df62806d03f5-ceph\") pod \"glance-default-internal-api-0\" (UID: \"8532ec07-5ee5-40c9-82f5-df62806d03f5\") " 
pod="openstack/glance-default-internal-api-0" Sep 30 10:43:19 crc kubenswrapper[4730]: I0930 10:43:19.175796 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8532ec07-5ee5-40c9-82f5-df62806d03f5-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"8532ec07-5ee5-40c9-82f5-df62806d03f5\") " pod="openstack/glance-default-internal-api-0" Sep 30 10:43:19 crc kubenswrapper[4730]: I0930 10:43:19.175878 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"8532ec07-5ee5-40c9-82f5-df62806d03f5\") " pod="openstack/glance-default-internal-api-0" Sep 30 10:43:19 crc kubenswrapper[4730]: I0930 10:43:19.279295 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jkj4v\" (UniqueName: \"kubernetes.io/projected/8532ec07-5ee5-40c9-82f5-df62806d03f5-kube-api-access-jkj4v\") pod \"glance-default-internal-api-0\" (UID: \"8532ec07-5ee5-40c9-82f5-df62806d03f5\") " pod="openstack/glance-default-internal-api-0" Sep 30 10:43:19 crc kubenswrapper[4730]: I0930 10:43:19.279397 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8532ec07-5ee5-40c9-82f5-df62806d03f5-config-data\") pod \"glance-default-internal-api-0\" (UID: \"8532ec07-5ee5-40c9-82f5-df62806d03f5\") " pod="openstack/glance-default-internal-api-0" Sep 30 10:43:19 crc kubenswrapper[4730]: I0930 10:43:19.279495 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8532ec07-5ee5-40c9-82f5-df62806d03f5-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"8532ec07-5ee5-40c9-82f5-df62806d03f5\") " pod="openstack/glance-default-internal-api-0" Sep 30 10:43:19 crc kubenswrapper[4730]: I0930 10:43:19.279518 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/8532ec07-5ee5-40c9-82f5-df62806d03f5-ceph\") pod \"glance-default-internal-api-0\" (UID: \"8532ec07-5ee5-40c9-82f5-df62806d03f5\") " pod="openstack/glance-default-internal-api-0" Sep 30 10:43:19 crc kubenswrapper[4730]: I0930 10:43:19.279553 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8532ec07-5ee5-40c9-82f5-df62806d03f5-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"8532ec07-5ee5-40c9-82f5-df62806d03f5\") " pod="openstack/glance-default-internal-api-0" Sep 30 10:43:19 crc kubenswrapper[4730]: I0930 10:43:19.280042 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"8532ec07-5ee5-40c9-82f5-df62806d03f5\") " pod="openstack/glance-default-internal-api-0" Sep 30 10:43:19 crc kubenswrapper[4730]: I0930 10:43:19.283244 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8532ec07-5ee5-40c9-82f5-df62806d03f5-logs\") pod \"glance-default-internal-api-0\" (UID: \"8532ec07-5ee5-40c9-82f5-df62806d03f5\") " pod="openstack/glance-default-internal-api-0" Sep 30 10:43:19 crc 
Sep 30 10:43:19 crc kubenswrapper[4730]: I0930 10:43:19.283346 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8532ec07-5ee5-40c9-82f5-df62806d03f5-scripts\") pod \"glance-default-internal-api-0\" (UID: \"8532ec07-5ee5-40c9-82f5-df62806d03f5\") " pod="openstack/glance-default-internal-api-0"
Sep 30 10:43:19 crc kubenswrapper[4730]: I0930 10:43:19.283372 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8532ec07-5ee5-40c9-82f5-df62806d03f5-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"8532ec07-5ee5-40c9-82f5-df62806d03f5\") " pod="openstack/glance-default-internal-api-0"
Sep 30 10:43:19 crc kubenswrapper[4730]: I0930 10:43:19.284069 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8532ec07-5ee5-40c9-82f5-df62806d03f5-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"8532ec07-5ee5-40c9-82f5-df62806d03f5\") " pod="openstack/glance-default-internal-api-0"
Sep 30 10:43:19 crc kubenswrapper[4730]: I0930 10:43:19.284466 4730 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"8532ec07-5ee5-40c9-82f5-df62806d03f5\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/glance-default-internal-api-0"
Sep 30 10:43:19 crc kubenswrapper[4730]: I0930 10:43:19.285189 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8532ec07-5ee5-40c9-82f5-df62806d03f5-logs\") pod \"glance-default-internal-api-0\" (UID: \"8532ec07-5ee5-40c9-82f5-df62806d03f5\") " pod="openstack/glance-default-internal-api-0"
Sep 30 10:43:19 crc kubenswrapper[4730]: I0930 10:43:19.286208 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/8532ec07-5ee5-40c9-82f5-df62806d03f5-ceph\") pod \"glance-default-internal-api-0\" (UID: \"8532ec07-5ee5-40c9-82f5-df62806d03f5\") " pod="openstack/glance-default-internal-api-0"
Sep 30 10:43:19 crc kubenswrapper[4730]: I0930 10:43:19.286519 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8532ec07-5ee5-40c9-82f5-df62806d03f5-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"8532ec07-5ee5-40c9-82f5-df62806d03f5\") " pod="openstack/glance-default-internal-api-0"
Sep 30 10:43:19 crc kubenswrapper[4730]: I0930 10:43:19.297149 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8532ec07-5ee5-40c9-82f5-df62806d03f5-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"8532ec07-5ee5-40c9-82f5-df62806d03f5\") " pod="openstack/glance-default-internal-api-0"
Sep 30 10:43:19 crc kubenswrapper[4730]: I0930 10:43:19.299364 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8532ec07-5ee5-40c9-82f5-df62806d03f5-config-data\") pod \"glance-default-internal-api-0\" (UID: \"8532ec07-5ee5-40c9-82f5-df62806d03f5\") " pod="openstack/glance-default-internal-api-0"
(UniqueName: \"kubernetes.io/projected/8532ec07-5ee5-40c9-82f5-df62806d03f5-kube-api-access-jkj4v\") pod \"glance-default-internal-api-0\" (UID: \"8532ec07-5ee5-40c9-82f5-df62806d03f5\") " pod="openstack/glance-default-internal-api-0" Sep 30 10:43:19 crc kubenswrapper[4730]: I0930 10:43:19.314267 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8532ec07-5ee5-40c9-82f5-df62806d03f5-scripts\") pod \"glance-default-internal-api-0\" (UID: \"8532ec07-5ee5-40c9-82f5-df62806d03f5\") " pod="openstack/glance-default-internal-api-0" Sep 30 10:43:19 crc kubenswrapper[4730]: I0930 10:43:19.333991 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"8532ec07-5ee5-40c9-82f5-df62806d03f5\") " pod="openstack/glance-default-internal-api-0" Sep 30 10:43:19 crc kubenswrapper[4730]: I0930 10:43:19.376427 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 30 10:43:19 crc kubenswrapper[4730]: I0930 10:43:19.492139 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 30 10:43:20 crc kubenswrapper[4730]: I0930 10:43:20.014973 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"e5bde710-0351-45d0-acb8-990719f9ba34","Type":"ContainerStarted","Data":"667ab2ac8938f065ff31e70e16c92ee9be80317e1f841cc39d4c4830b6a85839"} Sep 30 10:43:20 crc kubenswrapper[4730]: I0930 10:43:20.114196 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 30 10:43:20 crc kubenswrapper[4730]: I0930 10:43:20.400049 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="27e55446-db32-4ddf-a826-cf187d660f08" path="/var/lib/kubelet/pods/27e55446-db32-4ddf-a826-cf187d660f08/volumes" Sep 30 10:43:21 crc kubenswrapper[4730]: I0930 10:43:21.030483 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"8532ec07-5ee5-40c9-82f5-df62806d03f5","Type":"ContainerStarted","Data":"a1cbfc50eb663a38e877c5506e2442794597442b9628824c377f1429bf86247f"} Sep 30 10:43:21 crc kubenswrapper[4730]: I0930 10:43:21.032181 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"8532ec07-5ee5-40c9-82f5-df62806d03f5","Type":"ContainerStarted","Data":"b8f00d1899cf2ae6c4514150d66fbb1e862534770674af62860b4494eb5a7c72"} Sep 30 10:43:21 crc kubenswrapper[4730]: I0930 10:43:21.032446 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"e5bde710-0351-45d0-acb8-990719f9ba34","Type":"ContainerStarted","Data":"da0b086ea8720e6fe9e082ea5b71864165a3bcb6c004026a5514da44facb3fc9"} Sep 30 10:43:21 crc kubenswrapper[4730]: I0930 10:43:21.946843 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-volume-volume1-0" Sep 30 10:43:21 crc kubenswrapper[4730]: I0930 10:43:21.952764 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-backup-0" Sep 30 10:43:22 crc kubenswrapper[4730]: I0930 10:43:22.042513 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" 
event={"ID":"e5bde710-0351-45d0-acb8-990719f9ba34","Type":"ContainerStarted","Data":"7a22cef82796cf5ba3970a4045c02119cf3d7067ca1453aedb63f8c0becb527d"} Sep 30 10:43:22 crc kubenswrapper[4730]: I0930 10:43:22.069274 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=5.069254187 podStartE2EDuration="5.069254187s" podCreationTimestamp="2025-09-30 10:43:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 10:43:22.062282826 +0000 UTC m=+3246.395542829" watchObservedRunningTime="2025-09-30 10:43:22.069254187 +0000 UTC m=+3246.402514180" Sep 30 10:43:22 crc kubenswrapper[4730]: I0930 10:43:22.218337 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-volume-volume2-0" Sep 30 10:43:27 crc kubenswrapper[4730]: I0930 10:43:27.091769 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6f965598bf-7jtmk" event={"ID":"d563cf1f-fec6-4264-986f-61763e37c786","Type":"ContainerStarted","Data":"daeb44b046ef59da8efb4519d933855d712991e8852be47f89a70aeedb15deb1"} Sep 30 10:43:27 crc kubenswrapper[4730]: I0930 10:43:27.092208 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6f965598bf-7jtmk" event={"ID":"d563cf1f-fec6-4264-986f-61763e37c786","Type":"ContainerStarted","Data":"de51a8e419dff37e3cff76358a987db708da1aadad7f4c0b59f3f8de28ec3591"} Sep 30 10:43:27 crc kubenswrapper[4730]: I0930 10:43:27.091934 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-6f965598bf-7jtmk" podUID="d563cf1f-fec6-4264-986f-61763e37c786" containerName="horizon" containerID="cri-o://daeb44b046ef59da8efb4519d933855d712991e8852be47f89a70aeedb15deb1" gracePeriod=30 Sep 30 10:43:27 crc kubenswrapper[4730]: I0930 10:43:27.091867 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-6f965598bf-7jtmk" podUID="d563cf1f-fec6-4264-986f-61763e37c786" containerName="horizon-log" containerID="cri-o://de51a8e419dff37e3cff76358a987db708da1aadad7f4c0b59f3f8de28ec3591" gracePeriod=30 Sep 30 10:43:27 crc kubenswrapper[4730]: I0930 10:43:27.098497 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6b9f68988b-b4q58" event={"ID":"73859337-4ff6-4ada-bc9b-a29b6b1fc478","Type":"ContainerStarted","Data":"557e1e9813f526b4a98af975d0241cf7b762bb8da50ef8d24dfd539054cf17f2"} Sep 30 10:43:27 crc kubenswrapper[4730]: I0930 10:43:27.098542 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6b9f68988b-b4q58" event={"ID":"73859337-4ff6-4ada-bc9b-a29b6b1fc478","Type":"ContainerStarted","Data":"6af140d917a199fe92318241fc17d3512ed19b32fe499908198c461da89dd20a"} Sep 30 10:43:27 crc kubenswrapper[4730]: I0930 10:43:27.101149 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-78fdfbc44d-gv74b" event={"ID":"4e4cdc0d-7159-473c-b832-8628f59a1ebb","Type":"ContainerStarted","Data":"c4d703c5c9262f414f21052c01ef40c2fc862d36c48852e1414f0bd4c82f16a6"} Sep 30 10:43:27 crc kubenswrapper[4730]: I0930 10:43:27.101192 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-78fdfbc44d-gv74b" event={"ID":"4e4cdc0d-7159-473c-b832-8628f59a1ebb","Type":"ContainerStarted","Data":"ae52ffdfb2419402b2a5404015387f27588d5828e67f202689072c042ffa5057"} Sep 30 10:43:27 crc kubenswrapper[4730]: I0930 10:43:27.107091 4730 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"8532ec07-5ee5-40c9-82f5-df62806d03f5","Type":"ContainerStarted","Data":"d7c462ee97c468d132a290457a13361e7010b7c4f1d1e57b99ebd742d492cda0"} Sep 30 10:43:27 crc kubenswrapper[4730]: I0930 10:43:27.109984 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-866b9f44b9-x6nw7" event={"ID":"13559e77-1b10-43eb-af92-407513986ad3","Type":"ContainerStarted","Data":"b3fe6852c2e4ffa35fb9eef68dd9d1b1f9d6e768901665e53e8cf3e9e9729e46"} Sep 30 10:43:27 crc kubenswrapper[4730]: I0930 10:43:27.110027 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-866b9f44b9-x6nw7" event={"ID":"13559e77-1b10-43eb-af92-407513986ad3","Type":"ContainerStarted","Data":"31a7e64f12fae9666484d2f5a68a5bb27d2c5690994a8f03b1f7b8f83e77eb62"} Sep 30 10:43:27 crc kubenswrapper[4730]: I0930 10:43:27.110138 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-866b9f44b9-x6nw7" podUID="13559e77-1b10-43eb-af92-407513986ad3" containerName="horizon-log" containerID="cri-o://31a7e64f12fae9666484d2f5a68a5bb27d2c5690994a8f03b1f7b8f83e77eb62" gracePeriod=30 Sep 30 10:43:27 crc kubenswrapper[4730]: I0930 10:43:27.110157 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-866b9f44b9-x6nw7" podUID="13559e77-1b10-43eb-af92-407513986ad3" containerName="horizon" containerID="cri-o://b3fe6852c2e4ffa35fb9eef68dd9d1b1f9d6e768901665e53e8cf3e9e9729e46" gracePeriod=30 Sep 30 10:43:27 crc kubenswrapper[4730]: I0930 10:43:27.123319 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-6f965598bf-7jtmk" podStartSLOduration=3.254855601 podStartE2EDuration="16.123301914s" podCreationTimestamp="2025-09-30 10:43:11 +0000 UTC" firstStartedPulling="2025-09-30 10:43:13.113839684 +0000 UTC m=+3237.447099677" lastFinishedPulling="2025-09-30 10:43:25.982285997 +0000 UTC m=+3250.315545990" observedRunningTime="2025-09-30 10:43:27.115670906 +0000 UTC m=+3251.448930899" watchObservedRunningTime="2025-09-30 10:43:27.123301914 +0000 UTC m=+3251.456561907" Sep 30 10:43:27 crc kubenswrapper[4730]: I0930 10:43:27.137184 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-78fdfbc44d-gv74b" podStartSLOduration=3.180786931 podStartE2EDuration="13.137165374s" podCreationTimestamp="2025-09-30 10:43:14 +0000 UTC" firstStartedPulling="2025-09-30 10:43:16.038713276 +0000 UTC m=+3240.371973269" lastFinishedPulling="2025-09-30 10:43:25.995091719 +0000 UTC m=+3250.328351712" observedRunningTime="2025-09-30 10:43:27.136421795 +0000 UTC m=+3251.469681798" watchObservedRunningTime="2025-09-30 10:43:27.137165374 +0000 UTC m=+3251.470425367" Sep 30 10:43:27 crc kubenswrapper[4730]: I0930 10:43:27.167029 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=8.167006949 podStartE2EDuration="8.167006949s" podCreationTimestamp="2025-09-30 10:43:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 10:43:27.157892923 +0000 UTC m=+3251.491152926" watchObservedRunningTime="2025-09-30 10:43:27.167006949 +0000 UTC m=+3251.500266962" Sep 30 10:43:27 crc kubenswrapper[4730]: I0930 10:43:27.187545 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack/horizon-6b9f68988b-b4q58" podStartSLOduration=3.4737072700000002 podStartE2EDuration="13.187521272s" podCreationTimestamp="2025-09-30 10:43:14 +0000 UTC" firstStartedPulling="2025-09-30 10:43:16.317722624 +0000 UTC m=+3240.650982617" lastFinishedPulling="2025-09-30 10:43:26.031536636 +0000 UTC m=+3250.364796619" observedRunningTime="2025-09-30 10:43:27.179563735 +0000 UTC m=+3251.512823728" watchObservedRunningTime="2025-09-30 10:43:27.187521272 +0000 UTC m=+3251.520781265" Sep 30 10:43:27 crc kubenswrapper[4730]: I0930 10:43:27.204995 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-866b9f44b9-x6nw7" podStartSLOduration=2.874125237 podStartE2EDuration="15.204971975s" podCreationTimestamp="2025-09-30 10:43:12 +0000 UTC" firstStartedPulling="2025-09-30 10:43:13.672862625 +0000 UTC m=+3238.006122618" lastFinishedPulling="2025-09-30 10:43:26.003709363 +0000 UTC m=+3250.336969356" observedRunningTime="2025-09-30 10:43:27.200602192 +0000 UTC m=+3251.533862185" watchObservedRunningTime="2025-09-30 10:43:27.204971975 +0000 UTC m=+3251.538231968" Sep 30 10:43:28 crc kubenswrapper[4730]: I0930 10:43:28.653162 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Sep 30 10:43:28 crc kubenswrapper[4730]: I0930 10:43:28.653435 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Sep 30 10:43:28 crc kubenswrapper[4730]: I0930 10:43:28.699342 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Sep 30 10:43:28 crc kubenswrapper[4730]: I0930 10:43:28.709517 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Sep 30 10:43:29 crc kubenswrapper[4730]: I0930 10:43:29.131954 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Sep 30 10:43:29 crc kubenswrapper[4730]: I0930 10:43:29.132246 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Sep 30 10:43:29 crc kubenswrapper[4730]: I0930 10:43:29.492672 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Sep 30 10:43:29 crc kubenswrapper[4730]: I0930 10:43:29.493139 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Sep 30 10:43:29 crc kubenswrapper[4730]: I0930 10:43:29.526844 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Sep 30 10:43:29 crc kubenswrapper[4730]: I0930 10:43:29.564581 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Sep 30 10:43:30 crc kubenswrapper[4730]: I0930 10:43:30.137924 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Sep 30 10:43:30 crc kubenswrapper[4730]: I0930 10:43:30.137965 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Sep 30 10:43:31 crc kubenswrapper[4730]: I0930 10:43:31.149275 4730 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Sep 30 10:43:31 crc kubenswrapper[4730]: I0930 10:43:31.149594 4730 prober_manager.go:312] "Failed to trigger a 
manual run" probe="Readiness" Sep 30 10:43:32 crc kubenswrapper[4730]: I0930 10:43:32.331056 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-6f965598bf-7jtmk" Sep 30 10:43:32 crc kubenswrapper[4730]: I0930 10:43:32.788540 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-866b9f44b9-x6nw7" Sep 30 10:43:33 crc kubenswrapper[4730]: I0930 10:43:33.450701 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Sep 30 10:43:33 crc kubenswrapper[4730]: I0930 10:43:33.450766 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Sep 30 10:43:33 crc kubenswrapper[4730]: I0930 10:43:33.454050 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Sep 30 10:43:33 crc kubenswrapper[4730]: I0930 10:43:33.454097 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Sep 30 10:43:35 crc kubenswrapper[4730]: I0930 10:43:35.382686 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-78fdfbc44d-gv74b" Sep 30 10:43:35 crc kubenswrapper[4730]: I0930 10:43:35.382937 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-78fdfbc44d-gv74b" Sep 30 10:43:35 crc kubenswrapper[4730]: I0930 10:43:35.432723 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-6b9f68988b-b4q58" Sep 30 10:43:35 crc kubenswrapper[4730]: I0930 10:43:35.432973 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-6b9f68988b-b4q58" Sep 30 10:43:47 crc kubenswrapper[4730]: I0930 10:43:47.542213 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-6b9f68988b-b4q58" Sep 30 10:43:47 crc kubenswrapper[4730]: I0930 10:43:47.572835 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-78fdfbc44d-gv74b" Sep 30 10:43:49 crc kubenswrapper[4730]: I0930 10:43:49.234769 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-78fdfbc44d-gv74b" Sep 30 10:43:49 crc kubenswrapper[4730]: I0930 10:43:49.320360 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-6b9f68988b-b4q58" Sep 30 10:43:49 crc kubenswrapper[4730]: I0930 10:43:49.391993 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-78fdfbc44d-gv74b"] Sep 30 10:43:49 crc kubenswrapper[4730]: I0930 10:43:49.392198 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-78fdfbc44d-gv74b" podUID="4e4cdc0d-7159-473c-b832-8628f59a1ebb" containerName="horizon-log" containerID="cri-o://ae52ffdfb2419402b2a5404015387f27588d5828e67f202689072c042ffa5057" gracePeriod=30 Sep 30 10:43:49 crc kubenswrapper[4730]: I0930 10:43:49.392625 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-78fdfbc44d-gv74b" podUID="4e4cdc0d-7159-473c-b832-8628f59a1ebb" containerName="horizon" containerID="cri-o://c4d703c5c9262f414f21052c01ef40c2fc862d36c48852e1414f0bd4c82f16a6" gracePeriod=30 Sep 30 10:43:50 crc kubenswrapper[4730]: I0930 10:43:50.352056 4730 generic.go:334] "Generic (PLEG): container finished" 
podID="4e4cdc0d-7159-473c-b832-8628f59a1ebb" containerID="c4d703c5c9262f414f21052c01ef40c2fc862d36c48852e1414f0bd4c82f16a6" exitCode=0 Sep 30 10:43:50 crc kubenswrapper[4730]: I0930 10:43:50.352126 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-78fdfbc44d-gv74b" event={"ID":"4e4cdc0d-7159-473c-b832-8628f59a1ebb","Type":"ContainerDied","Data":"c4d703c5c9262f414f21052c01ef40c2fc862d36c48852e1414f0bd4c82f16a6"} Sep 30 10:43:55 crc kubenswrapper[4730]: I0930 10:43:55.383331 4730 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-78fdfbc44d-gv74b" podUID="4e4cdc0d-7159-473c-b832-8628f59a1ebb" containerName="horizon" probeResult="failure" output="Get \"https://10.217.1.3:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.3:8443: connect: connection refused" Sep 30 10:43:57 crc kubenswrapper[4730]: I0930 10:43:57.431812 4730 generic.go:334] "Generic (PLEG): container finished" podID="13559e77-1b10-43eb-af92-407513986ad3" containerID="b3fe6852c2e4ffa35fb9eef68dd9d1b1f9d6e768901665e53e8cf3e9e9729e46" exitCode=137 Sep 30 10:43:57 crc kubenswrapper[4730]: I0930 10:43:57.432067 4730 generic.go:334] "Generic (PLEG): container finished" podID="13559e77-1b10-43eb-af92-407513986ad3" containerID="31a7e64f12fae9666484d2f5a68a5bb27d2c5690994a8f03b1f7b8f83e77eb62" exitCode=137 Sep 30 10:43:57 crc kubenswrapper[4730]: I0930 10:43:57.432119 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-866b9f44b9-x6nw7" event={"ID":"13559e77-1b10-43eb-af92-407513986ad3","Type":"ContainerDied","Data":"b3fe6852c2e4ffa35fb9eef68dd9d1b1f9d6e768901665e53e8cf3e9e9729e46"} Sep 30 10:43:57 crc kubenswrapper[4730]: I0930 10:43:57.432144 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-866b9f44b9-x6nw7" event={"ID":"13559e77-1b10-43eb-af92-407513986ad3","Type":"ContainerDied","Data":"31a7e64f12fae9666484d2f5a68a5bb27d2c5690994a8f03b1f7b8f83e77eb62"} Sep 30 10:43:57 crc kubenswrapper[4730]: I0930 10:43:57.435511 4730 generic.go:334] "Generic (PLEG): container finished" podID="d563cf1f-fec6-4264-986f-61763e37c786" containerID="daeb44b046ef59da8efb4519d933855d712991e8852be47f89a70aeedb15deb1" exitCode=137 Sep 30 10:43:57 crc kubenswrapper[4730]: I0930 10:43:57.435532 4730 generic.go:334] "Generic (PLEG): container finished" podID="d563cf1f-fec6-4264-986f-61763e37c786" containerID="de51a8e419dff37e3cff76358a987db708da1aadad7f4c0b59f3f8de28ec3591" exitCode=137 Sep 30 10:43:57 crc kubenswrapper[4730]: I0930 10:43:57.435547 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6f965598bf-7jtmk" event={"ID":"d563cf1f-fec6-4264-986f-61763e37c786","Type":"ContainerDied","Data":"daeb44b046ef59da8efb4519d933855d712991e8852be47f89a70aeedb15deb1"} Sep 30 10:43:57 crc kubenswrapper[4730]: I0930 10:43:57.435564 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6f965598bf-7jtmk" event={"ID":"d563cf1f-fec6-4264-986f-61763e37c786","Type":"ContainerDied","Data":"de51a8e419dff37e3cff76358a987db708da1aadad7f4c0b59f3f8de28ec3591"} Sep 30 10:43:57 crc kubenswrapper[4730]: I0930 10:43:57.557332 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-6f965598bf-7jtmk" Sep 30 10:43:57 crc kubenswrapper[4730]: I0930 10:43:57.563498 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-866b9f44b9-x6nw7" Sep 30 10:43:57 crc kubenswrapper[4730]: I0930 10:43:57.670915 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d563cf1f-fec6-4264-986f-61763e37c786-scripts\") pod \"d563cf1f-fec6-4264-986f-61763e37c786\" (UID: \"d563cf1f-fec6-4264-986f-61763e37c786\") " Sep 30 10:43:57 crc kubenswrapper[4730]: I0930 10:43:57.671236 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d563cf1f-fec6-4264-986f-61763e37c786-config-data\") pod \"d563cf1f-fec6-4264-986f-61763e37c786\" (UID: \"d563cf1f-fec6-4264-986f-61763e37c786\") " Sep 30 10:43:57 crc kubenswrapper[4730]: I0930 10:43:57.671279 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hl8cx\" (UniqueName: \"kubernetes.io/projected/d563cf1f-fec6-4264-986f-61763e37c786-kube-api-access-hl8cx\") pod \"d563cf1f-fec6-4264-986f-61763e37c786\" (UID: \"d563cf1f-fec6-4264-986f-61763e37c786\") " Sep 30 10:43:57 crc kubenswrapper[4730]: I0930 10:43:57.671294 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/13559e77-1b10-43eb-af92-407513986ad3-config-data\") pod \"13559e77-1b10-43eb-af92-407513986ad3\" (UID: \"13559e77-1b10-43eb-af92-407513986ad3\") " Sep 30 10:43:57 crc kubenswrapper[4730]: I0930 10:43:57.671354 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/13559e77-1b10-43eb-af92-407513986ad3-horizon-secret-key\") pod \"13559e77-1b10-43eb-af92-407513986ad3\" (UID: \"13559e77-1b10-43eb-af92-407513986ad3\") " Sep 30 10:43:57 crc kubenswrapper[4730]: I0930 10:43:57.671375 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d67wh\" (UniqueName: \"kubernetes.io/projected/13559e77-1b10-43eb-af92-407513986ad3-kube-api-access-d67wh\") pod \"13559e77-1b10-43eb-af92-407513986ad3\" (UID: \"13559e77-1b10-43eb-af92-407513986ad3\") " Sep 30 10:43:57 crc kubenswrapper[4730]: I0930 10:43:57.671409 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d563cf1f-fec6-4264-986f-61763e37c786-logs\") pod \"d563cf1f-fec6-4264-986f-61763e37c786\" (UID: \"d563cf1f-fec6-4264-986f-61763e37c786\") " Sep 30 10:43:57 crc kubenswrapper[4730]: I0930 10:43:57.671486 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/13559e77-1b10-43eb-af92-407513986ad3-scripts\") pod \"13559e77-1b10-43eb-af92-407513986ad3\" (UID: \"13559e77-1b10-43eb-af92-407513986ad3\") " Sep 30 10:43:57 crc kubenswrapper[4730]: I0930 10:43:57.671543 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/d563cf1f-fec6-4264-986f-61763e37c786-horizon-secret-key\") pod \"d563cf1f-fec6-4264-986f-61763e37c786\" (UID: \"d563cf1f-fec6-4264-986f-61763e37c786\") " Sep 30 10:43:57 crc kubenswrapper[4730]: I0930 10:43:57.671560 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/13559e77-1b10-43eb-af92-407513986ad3-logs\") pod \"13559e77-1b10-43eb-af92-407513986ad3\" (UID: 
\"13559e77-1b10-43eb-af92-407513986ad3\") " Sep 30 10:43:57 crc kubenswrapper[4730]: I0930 10:43:57.673063 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/13559e77-1b10-43eb-af92-407513986ad3-logs" (OuterVolumeSpecName: "logs") pod "13559e77-1b10-43eb-af92-407513986ad3" (UID: "13559e77-1b10-43eb-af92-407513986ad3"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 10:43:57 crc kubenswrapper[4730]: I0930 10:43:57.673067 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d563cf1f-fec6-4264-986f-61763e37c786-logs" (OuterVolumeSpecName: "logs") pod "d563cf1f-fec6-4264-986f-61763e37c786" (UID: "d563cf1f-fec6-4264-986f-61763e37c786"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 10:43:57 crc kubenswrapper[4730]: I0930 10:43:57.677434 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d563cf1f-fec6-4264-986f-61763e37c786-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "d563cf1f-fec6-4264-986f-61763e37c786" (UID: "d563cf1f-fec6-4264-986f-61763e37c786"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:43:57 crc kubenswrapper[4730]: I0930 10:43:57.677902 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/13559e77-1b10-43eb-af92-407513986ad3-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "13559e77-1b10-43eb-af92-407513986ad3" (UID: "13559e77-1b10-43eb-af92-407513986ad3"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:43:57 crc kubenswrapper[4730]: I0930 10:43:57.678366 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/13559e77-1b10-43eb-af92-407513986ad3-kube-api-access-d67wh" (OuterVolumeSpecName: "kube-api-access-d67wh") pod "13559e77-1b10-43eb-af92-407513986ad3" (UID: "13559e77-1b10-43eb-af92-407513986ad3"). InnerVolumeSpecName "kube-api-access-d67wh". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:43:57 crc kubenswrapper[4730]: I0930 10:43:57.679776 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d563cf1f-fec6-4264-986f-61763e37c786-kube-api-access-hl8cx" (OuterVolumeSpecName: "kube-api-access-hl8cx") pod "d563cf1f-fec6-4264-986f-61763e37c786" (UID: "d563cf1f-fec6-4264-986f-61763e37c786"). InnerVolumeSpecName "kube-api-access-hl8cx". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:43:57 crc kubenswrapper[4730]: I0930 10:43:57.699677 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d563cf1f-fec6-4264-986f-61763e37c786-config-data" (OuterVolumeSpecName: "config-data") pod "d563cf1f-fec6-4264-986f-61763e37c786" (UID: "d563cf1f-fec6-4264-986f-61763e37c786"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 10:43:57 crc kubenswrapper[4730]: I0930 10:43:57.700719 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/13559e77-1b10-43eb-af92-407513986ad3-scripts" (OuterVolumeSpecName: "scripts") pod "13559e77-1b10-43eb-af92-407513986ad3" (UID: "13559e77-1b10-43eb-af92-407513986ad3"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 10:43:57 crc kubenswrapper[4730]: I0930 10:43:57.702033 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/13559e77-1b10-43eb-af92-407513986ad3-config-data" (OuterVolumeSpecName: "config-data") pod "13559e77-1b10-43eb-af92-407513986ad3" (UID: "13559e77-1b10-43eb-af92-407513986ad3"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 10:43:57 crc kubenswrapper[4730]: I0930 10:43:57.705756 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d563cf1f-fec6-4264-986f-61763e37c786-scripts" (OuterVolumeSpecName: "scripts") pod "d563cf1f-fec6-4264-986f-61763e37c786" (UID: "d563cf1f-fec6-4264-986f-61763e37c786"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 10:43:57 crc kubenswrapper[4730]: I0930 10:43:57.774100 4730 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d563cf1f-fec6-4264-986f-61763e37c786-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 10:43:57 crc kubenswrapper[4730]: I0930 10:43:57.774130 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hl8cx\" (UniqueName: \"kubernetes.io/projected/d563cf1f-fec6-4264-986f-61763e37c786-kube-api-access-hl8cx\") on node \"crc\" DevicePath \"\"" Sep 30 10:43:57 crc kubenswrapper[4730]: I0930 10:43:57.774142 4730 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/13559e77-1b10-43eb-af92-407513986ad3-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 10:43:57 crc kubenswrapper[4730]: I0930 10:43:57.774150 4730 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/13559e77-1b10-43eb-af92-407513986ad3-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Sep 30 10:43:57 crc kubenswrapper[4730]: I0930 10:43:57.774175 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d67wh\" (UniqueName: \"kubernetes.io/projected/13559e77-1b10-43eb-af92-407513986ad3-kube-api-access-d67wh\") on node \"crc\" DevicePath \"\"" Sep 30 10:43:57 crc kubenswrapper[4730]: I0930 10:43:57.774185 4730 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d563cf1f-fec6-4264-986f-61763e37c786-logs\") on node \"crc\" DevicePath \"\"" Sep 30 10:43:57 crc kubenswrapper[4730]: I0930 10:43:57.774192 4730 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/13559e77-1b10-43eb-af92-407513986ad3-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 10:43:57 crc kubenswrapper[4730]: I0930 10:43:57.774200 4730 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/d563cf1f-fec6-4264-986f-61763e37c786-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Sep 30 10:43:57 crc kubenswrapper[4730]: I0930 10:43:57.774207 4730 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/13559e77-1b10-43eb-af92-407513986ad3-logs\") on node \"crc\" DevicePath \"\"" Sep 30 10:43:57 crc kubenswrapper[4730]: I0930 10:43:57.774214 4730 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d563cf1f-fec6-4264-986f-61763e37c786-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 
Sep 30 10:43:58 crc kubenswrapper[4730]: I0930 10:43:58.453682 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-866b9f44b9-x6nw7"
Sep 30 10:43:58 crc kubenswrapper[4730]: I0930 10:43:58.453690 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-866b9f44b9-x6nw7" event={"ID":"13559e77-1b10-43eb-af92-407513986ad3","Type":"ContainerDied","Data":"8bccc496d6a29afe6d4ed69a2b68b3832d4d0314ba4a43437c368398b90e2165"}
Sep 30 10:43:58 crc kubenswrapper[4730]: I0930 10:43:58.453768 4730 scope.go:117] "RemoveContainer" containerID="b3fe6852c2e4ffa35fb9eef68dd9d1b1f9d6e768901665e53e8cf3e9e9729e46"
Sep 30 10:43:58 crc kubenswrapper[4730]: I0930 10:43:58.458822 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6f965598bf-7jtmk" event={"ID":"d563cf1f-fec6-4264-986f-61763e37c786","Type":"ContainerDied","Data":"1e301ba986dbe1522591209b08d67aac46076405063cac4f2f6ac63ef36d269f"}
Sep 30 10:43:58 crc kubenswrapper[4730]: I0930 10:43:58.458943 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-6f965598bf-7jtmk"
Sep 30 10:43:58 crc kubenswrapper[4730]: I0930 10:43:58.498934 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-866b9f44b9-x6nw7"]
Sep 30 10:43:58 crc kubenswrapper[4730]: I0930 10:43:58.517276 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-866b9f44b9-x6nw7"]
Sep 30 10:43:58 crc kubenswrapper[4730]: I0930 10:43:58.529720 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-6f965598bf-7jtmk"]
Sep 30 10:43:58 crc kubenswrapper[4730]: I0930 10:43:58.538245 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-6f965598bf-7jtmk"]
Sep 30 10:43:58 crc kubenswrapper[4730]: I0930 10:43:58.669420 4730 scope.go:117] "RemoveContainer" containerID="31a7e64f12fae9666484d2f5a68a5bb27d2c5690994a8f03b1f7b8f83e77eb62"
Sep 30 10:43:58 crc kubenswrapper[4730]: I0930 10:43:58.687766 4730 scope.go:117] "RemoveContainer" containerID="daeb44b046ef59da8efb4519d933855d712991e8852be47f89a70aeedb15deb1"
Sep 30 10:43:58 crc kubenswrapper[4730]: I0930 10:43:58.874545 4730 scope.go:117] "RemoveContainer" containerID="de51a8e419dff37e3cff76358a987db708da1aadad7f4c0b59f3f8de28ec3591"
Sep 30 10:44:00 crc kubenswrapper[4730]: I0930 10:44:00.393761 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="13559e77-1b10-43eb-af92-407513986ad3" path="/var/lib/kubelet/pods/13559e77-1b10-43eb-af92-407513986ad3/volumes"
Sep 30 10:44:00 crc kubenswrapper[4730]: I0930 10:44:00.394415 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d563cf1f-fec6-4264-986f-61763e37c786" path="/var/lib/kubelet/pods/d563cf1f-fec6-4264-986f-61763e37c786/volumes"
Sep 30 10:44:05 crc kubenswrapper[4730]: I0930 10:44:05.383997 4730 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-78fdfbc44d-gv74b" podUID="4e4cdc0d-7159-473c-b832-8628f59a1ebb" containerName="horizon" probeResult="failure" output="Get \"https://10.217.1.3:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.3:8443: connect: connection refused"
\"https://10.217.1.3:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.3:8443: connect: connection refused" Sep 30 10:44:15 crc kubenswrapper[4730]: I0930 10:44:15.385503 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-78fdfbc44d-gv74b" Sep 30 10:44:19 crc kubenswrapper[4730]: I0930 10:44:19.698973 4730 generic.go:334] "Generic (PLEG): container finished" podID="4e4cdc0d-7159-473c-b832-8628f59a1ebb" containerID="ae52ffdfb2419402b2a5404015387f27588d5828e67f202689072c042ffa5057" exitCode=137 Sep 30 10:44:19 crc kubenswrapper[4730]: I0930 10:44:19.699098 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-78fdfbc44d-gv74b" event={"ID":"4e4cdc0d-7159-473c-b832-8628f59a1ebb","Type":"ContainerDied","Data":"ae52ffdfb2419402b2a5404015387f27588d5828e67f202689072c042ffa5057"} Sep 30 10:44:19 crc kubenswrapper[4730]: I0930 10:44:19.833276 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-78fdfbc44d-gv74b" Sep 30 10:44:19 crc kubenswrapper[4730]: I0930 10:44:19.980981 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e4cdc0d-7159-473c-b832-8628f59a1ebb-horizon-tls-certs\") pod \"4e4cdc0d-7159-473c-b832-8628f59a1ebb\" (UID: \"4e4cdc0d-7159-473c-b832-8628f59a1ebb\") " Sep 30 10:44:19 crc kubenswrapper[4730]: I0930 10:44:19.981035 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/4e4cdc0d-7159-473c-b832-8628f59a1ebb-horizon-secret-key\") pod \"4e4cdc0d-7159-473c-b832-8628f59a1ebb\" (UID: \"4e4cdc0d-7159-473c-b832-8628f59a1ebb\") " Sep 30 10:44:19 crc kubenswrapper[4730]: I0930 10:44:19.981080 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4e4cdc0d-7159-473c-b832-8628f59a1ebb-logs\") pod \"4e4cdc0d-7159-473c-b832-8628f59a1ebb\" (UID: \"4e4cdc0d-7159-473c-b832-8628f59a1ebb\") " Sep 30 10:44:19 crc kubenswrapper[4730]: I0930 10:44:19.981157 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7p4pq\" (UniqueName: \"kubernetes.io/projected/4e4cdc0d-7159-473c-b832-8628f59a1ebb-kube-api-access-7p4pq\") pod \"4e4cdc0d-7159-473c-b832-8628f59a1ebb\" (UID: \"4e4cdc0d-7159-473c-b832-8628f59a1ebb\") " Sep 30 10:44:19 crc kubenswrapper[4730]: I0930 10:44:19.982103 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4e4cdc0d-7159-473c-b832-8628f59a1ebb-logs" (OuterVolumeSpecName: "logs") pod "4e4cdc0d-7159-473c-b832-8628f59a1ebb" (UID: "4e4cdc0d-7159-473c-b832-8628f59a1ebb"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 10:44:19 crc kubenswrapper[4730]: I0930 10:44:19.982203 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4e4cdc0d-7159-473c-b832-8628f59a1ebb-scripts\") pod \"4e4cdc0d-7159-473c-b832-8628f59a1ebb\" (UID: \"4e4cdc0d-7159-473c-b832-8628f59a1ebb\") " Sep 30 10:44:19 crc kubenswrapper[4730]: I0930 10:44:19.982469 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4e4cdc0d-7159-473c-b832-8628f59a1ebb-config-data\") pod \"4e4cdc0d-7159-473c-b832-8628f59a1ebb\" (UID: \"4e4cdc0d-7159-473c-b832-8628f59a1ebb\") " Sep 30 10:44:19 crc kubenswrapper[4730]: I0930 10:44:19.982507 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e4cdc0d-7159-473c-b832-8628f59a1ebb-combined-ca-bundle\") pod \"4e4cdc0d-7159-473c-b832-8628f59a1ebb\" (UID: \"4e4cdc0d-7159-473c-b832-8628f59a1ebb\") " Sep 30 10:44:19 crc kubenswrapper[4730]: I0930 10:44:19.983095 4730 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4e4cdc0d-7159-473c-b832-8628f59a1ebb-logs\") on node \"crc\" DevicePath \"\"" Sep 30 10:44:19 crc kubenswrapper[4730]: I0930 10:44:19.987410 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e4cdc0d-7159-473c-b832-8628f59a1ebb-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "4e4cdc0d-7159-473c-b832-8628f59a1ebb" (UID: "4e4cdc0d-7159-473c-b832-8628f59a1ebb"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:44:19 crc kubenswrapper[4730]: I0930 10:44:19.996986 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4e4cdc0d-7159-473c-b832-8628f59a1ebb-kube-api-access-7p4pq" (OuterVolumeSpecName: "kube-api-access-7p4pq") pod "4e4cdc0d-7159-473c-b832-8628f59a1ebb" (UID: "4e4cdc0d-7159-473c-b832-8628f59a1ebb"). InnerVolumeSpecName "kube-api-access-7p4pq". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:44:20 crc kubenswrapper[4730]: I0930 10:44:20.014223 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e4cdc0d-7159-473c-b832-8628f59a1ebb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4e4cdc0d-7159-473c-b832-8628f59a1ebb" (UID: "4e4cdc0d-7159-473c-b832-8628f59a1ebb"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:44:20 crc kubenswrapper[4730]: I0930 10:44:20.016687 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4e4cdc0d-7159-473c-b832-8628f59a1ebb-config-data" (OuterVolumeSpecName: "config-data") pod "4e4cdc0d-7159-473c-b832-8628f59a1ebb" (UID: "4e4cdc0d-7159-473c-b832-8628f59a1ebb"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 10:44:20 crc kubenswrapper[4730]: I0930 10:44:20.018100 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4e4cdc0d-7159-473c-b832-8628f59a1ebb-scripts" (OuterVolumeSpecName: "scripts") pod "4e4cdc0d-7159-473c-b832-8628f59a1ebb" (UID: "4e4cdc0d-7159-473c-b832-8628f59a1ebb"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 10:44:20 crc kubenswrapper[4730]: I0930 10:44:20.043900 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e4cdc0d-7159-473c-b832-8628f59a1ebb-horizon-tls-certs" (OuterVolumeSpecName: "horizon-tls-certs") pod "4e4cdc0d-7159-473c-b832-8628f59a1ebb" (UID: "4e4cdc0d-7159-473c-b832-8628f59a1ebb"). InnerVolumeSpecName "horizon-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:44:20 crc kubenswrapper[4730]: I0930 10:44:20.084827 4730 reconciler_common.go:293] "Volume detached for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e4cdc0d-7159-473c-b832-8628f59a1ebb-horizon-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 10:44:20 crc kubenswrapper[4730]: I0930 10:44:20.084867 4730 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/4e4cdc0d-7159-473c-b832-8628f59a1ebb-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Sep 30 10:44:20 crc kubenswrapper[4730]: I0930 10:44:20.084879 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7p4pq\" (UniqueName: \"kubernetes.io/projected/4e4cdc0d-7159-473c-b832-8628f59a1ebb-kube-api-access-7p4pq\") on node \"crc\" DevicePath \"\"" Sep 30 10:44:20 crc kubenswrapper[4730]: I0930 10:44:20.084893 4730 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4e4cdc0d-7159-473c-b832-8628f59a1ebb-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 10:44:20 crc kubenswrapper[4730]: I0930 10:44:20.084903 4730 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4e4cdc0d-7159-473c-b832-8628f59a1ebb-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 10:44:20 crc kubenswrapper[4730]: I0930 10:44:20.084913 4730 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e4cdc0d-7159-473c-b832-8628f59a1ebb-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 10:44:20 crc kubenswrapper[4730]: I0930 10:44:20.710631 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-78fdfbc44d-gv74b" event={"ID":"4e4cdc0d-7159-473c-b832-8628f59a1ebb","Type":"ContainerDied","Data":"16183743c9f9a57fc8a190966203be7e22dc811b57b6c2fb27ac437607a33a20"} Sep 30 10:44:20 crc kubenswrapper[4730]: I0930 10:44:20.710916 4730 scope.go:117] "RemoveContainer" containerID="c4d703c5c9262f414f21052c01ef40c2fc862d36c48852e1414f0bd4c82f16a6" Sep 30 10:44:20 crc kubenswrapper[4730]: I0930 10:44:20.710751 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-78fdfbc44d-gv74b" Sep 30 10:44:20 crc kubenswrapper[4730]: I0930 10:44:20.744508 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-78fdfbc44d-gv74b"] Sep 30 10:44:20 crc kubenswrapper[4730]: I0930 10:44:20.754081 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-78fdfbc44d-gv74b"] Sep 30 10:44:20 crc kubenswrapper[4730]: I0930 10:44:20.935234 4730 scope.go:117] "RemoveContainer" containerID="ae52ffdfb2419402b2a5404015387f27588d5828e67f202689072c042ffa5057" Sep 30 10:44:22 crc kubenswrapper[4730]: I0930 10:44:22.395339 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4e4cdc0d-7159-473c-b832-8628f59a1ebb" path="/var/lib/kubelet/pods/4e4cdc0d-7159-473c-b832-8628f59a1ebb/volumes" Sep 30 10:44:23 crc kubenswrapper[4730]: I0930 10:44:23.003924 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/prometheus-metric-storage-0"] Sep 30 10:44:23 crc kubenswrapper[4730]: I0930 10:44:23.004407 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/prometheus-metric-storage-0" podUID="711be93d-c342-44a5-aac9-ace1d09682a0" containerName="prometheus" containerID="cri-o://a561ef0dd4d1e16e5f8d236ce3741951e8ea4002afbc19febbe41202eecc609d" gracePeriod=600 Sep 30 10:44:23 crc kubenswrapper[4730]: I0930 10:44:23.004470 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/prometheus-metric-storage-0" podUID="711be93d-c342-44a5-aac9-ace1d09682a0" containerName="thanos-sidecar" containerID="cri-o://af16d88b26372b7af8fd00163d08b8dcabdf539a068578c7c5ab22e51ec68665" gracePeriod=600 Sep 30 10:44:23 crc kubenswrapper[4730]: I0930 10:44:23.004472 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/prometheus-metric-storage-0" podUID="711be93d-c342-44a5-aac9-ace1d09682a0" containerName="config-reloader" containerID="cri-o://efae9502c6e2b8bf6458b3ffeccb24c8a6b267c7cff07ffe3c4a6e19b8b4bc04" gracePeriod=600 Sep 30 10:44:23 crc kubenswrapper[4730]: I0930 10:44:23.749519 4730 generic.go:334] "Generic (PLEG): container finished" podID="711be93d-c342-44a5-aac9-ace1d09682a0" containerID="af16d88b26372b7af8fd00163d08b8dcabdf539a068578c7c5ab22e51ec68665" exitCode=0 Sep 30 10:44:23 crc kubenswrapper[4730]: I0930 10:44:23.749855 4730 generic.go:334] "Generic (PLEG): container finished" podID="711be93d-c342-44a5-aac9-ace1d09682a0" containerID="efae9502c6e2b8bf6458b3ffeccb24c8a6b267c7cff07ffe3c4a6e19b8b4bc04" exitCode=0 Sep 30 10:44:23 crc kubenswrapper[4730]: I0930 10:44:23.749867 4730 generic.go:334] "Generic (PLEG): container finished" podID="711be93d-c342-44a5-aac9-ace1d09682a0" containerID="a561ef0dd4d1e16e5f8d236ce3741951e8ea4002afbc19febbe41202eecc609d" exitCode=0 Sep 30 10:44:23 crc kubenswrapper[4730]: I0930 10:44:23.749681 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"711be93d-c342-44a5-aac9-ace1d09682a0","Type":"ContainerDied","Data":"af16d88b26372b7af8fd00163d08b8dcabdf539a068578c7c5ab22e51ec68665"} Sep 30 10:44:23 crc kubenswrapper[4730]: I0930 10:44:23.749905 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"711be93d-c342-44a5-aac9-ace1d09682a0","Type":"ContainerDied","Data":"efae9502c6e2b8bf6458b3ffeccb24c8a6b267c7cff07ffe3c4a6e19b8b4bc04"} Sep 30 10:44:23 crc kubenswrapper[4730]: I0930 10:44:23.749921 4730 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"711be93d-c342-44a5-aac9-ace1d09682a0","Type":"ContainerDied","Data":"a561ef0dd4d1e16e5f8d236ce3741951e8ea4002afbc19febbe41202eecc609d"} Sep 30 10:44:24 crc kubenswrapper[4730]: I0930 10:44:24.008290 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0" Sep 30 10:44:24 crc kubenswrapper[4730]: I0930 10:44:24.167078 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/711be93d-c342-44a5-aac9-ace1d09682a0-web-config\") pod \"711be93d-c342-44a5-aac9-ace1d09682a0\" (UID: \"711be93d-c342-44a5-aac9-ace1d09682a0\") " Sep 30 10:44:24 crc kubenswrapper[4730]: I0930 10:44:24.167133 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/711be93d-c342-44a5-aac9-ace1d09682a0-secret-combined-ca-bundle\") pod \"711be93d-c342-44a5-aac9-ace1d09682a0\" (UID: \"711be93d-c342-44a5-aac9-ace1d09682a0\") " Sep 30 10:44:24 crc kubenswrapper[4730]: I0930 10:44:24.167387 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"prometheus-metric-storage-db\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-5d620c66-5a11-463c-a9e7-c12e856084b2\") pod \"711be93d-c342-44a5-aac9-ace1d09682a0\" (UID: \"711be93d-c342-44a5-aac9-ace1d09682a0\") " Sep 30 10:44:24 crc kubenswrapper[4730]: I0930 10:44:24.167452 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/711be93d-c342-44a5-aac9-ace1d09682a0-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"711be93d-c342-44a5-aac9-ace1d09682a0\" (UID: \"711be93d-c342-44a5-aac9-ace1d09682a0\") " Sep 30 10:44:24 crc kubenswrapper[4730]: I0930 10:44:24.167518 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/711be93d-c342-44a5-aac9-ace1d09682a0-thanos-prometheus-http-client-file\") pod \"711be93d-c342-44a5-aac9-ace1d09682a0\" (UID: \"711be93d-c342-44a5-aac9-ace1d09682a0\") " Sep 30 10:44:24 crc kubenswrapper[4730]: I0930 10:44:24.167567 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6dqhg\" (UniqueName: \"kubernetes.io/projected/711be93d-c342-44a5-aac9-ace1d09682a0-kube-api-access-6dqhg\") pod \"711be93d-c342-44a5-aac9-ace1d09682a0\" (UID: \"711be93d-c342-44a5-aac9-ace1d09682a0\") " Sep 30 10:44:24 crc kubenswrapper[4730]: I0930 10:44:24.167634 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/711be93d-c342-44a5-aac9-ace1d09682a0-config\") pod \"711be93d-c342-44a5-aac9-ace1d09682a0\" (UID: \"711be93d-c342-44a5-aac9-ace1d09682a0\") " Sep 30 10:44:24 crc kubenswrapper[4730]: I0930 10:44:24.167663 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/711be93d-c342-44a5-aac9-ace1d09682a0-prometheus-metric-storage-rulefiles-0\") pod \"711be93d-c342-44a5-aac9-ace1d09682a0\" (UID: \"711be93d-c342-44a5-aac9-ace1d09682a0\") " Sep 30 10:44:24 crc kubenswrapper[4730]: I0930 10:44:24.167735 4730 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/711be93d-c342-44a5-aac9-ace1d09682a0-config-out\") pod \"711be93d-c342-44a5-aac9-ace1d09682a0\" (UID: \"711be93d-c342-44a5-aac9-ace1d09682a0\") " Sep 30 10:44:24 crc kubenswrapper[4730]: I0930 10:44:24.167844 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/711be93d-c342-44a5-aac9-ace1d09682a0-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"711be93d-c342-44a5-aac9-ace1d09682a0\" (UID: \"711be93d-c342-44a5-aac9-ace1d09682a0\") " Sep 30 10:44:24 crc kubenswrapper[4730]: I0930 10:44:24.167926 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/711be93d-c342-44a5-aac9-ace1d09682a0-tls-assets\") pod \"711be93d-c342-44a5-aac9-ace1d09682a0\" (UID: \"711be93d-c342-44a5-aac9-ace1d09682a0\") " Sep 30 10:44:24 crc kubenswrapper[4730]: I0930 10:44:24.169355 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/711be93d-c342-44a5-aac9-ace1d09682a0-prometheus-metric-storage-rulefiles-0" (OuterVolumeSpecName: "prometheus-metric-storage-rulefiles-0") pod "711be93d-c342-44a5-aac9-ace1d09682a0" (UID: "711be93d-c342-44a5-aac9-ace1d09682a0"). InnerVolumeSpecName "prometheus-metric-storage-rulefiles-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 10:44:24 crc kubenswrapper[4730]: I0930 10:44:24.174522 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/711be93d-c342-44a5-aac9-ace1d09682a0-secret-combined-ca-bundle" (OuterVolumeSpecName: "secret-combined-ca-bundle") pod "711be93d-c342-44a5-aac9-ace1d09682a0" (UID: "711be93d-c342-44a5-aac9-ace1d09682a0"). InnerVolumeSpecName "secret-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:44:24 crc kubenswrapper[4730]: I0930 10:44:24.175049 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/711be93d-c342-44a5-aac9-ace1d09682a0-tls-assets" (OuterVolumeSpecName: "tls-assets") pod "711be93d-c342-44a5-aac9-ace1d09682a0" (UID: "711be93d-c342-44a5-aac9-ace1d09682a0"). InnerVolumeSpecName "tls-assets". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:44:24 crc kubenswrapper[4730]: I0930 10:44:24.176764 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/711be93d-c342-44a5-aac9-ace1d09682a0-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d" (OuterVolumeSpecName: "web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d") pod "711be93d-c342-44a5-aac9-ace1d09682a0" (UID: "711be93d-c342-44a5-aac9-ace1d09682a0"). InnerVolumeSpecName "web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:44:24 crc kubenswrapper[4730]: I0930 10:44:24.176901 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/711be93d-c342-44a5-aac9-ace1d09682a0-kube-api-access-6dqhg" (OuterVolumeSpecName: "kube-api-access-6dqhg") pod "711be93d-c342-44a5-aac9-ace1d09682a0" (UID: "711be93d-c342-44a5-aac9-ace1d09682a0"). InnerVolumeSpecName "kube-api-access-6dqhg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:44:24 crc kubenswrapper[4730]: I0930 10:44:24.179733 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/711be93d-c342-44a5-aac9-ace1d09682a0-config-out" (OuterVolumeSpecName: "config-out") pod "711be93d-c342-44a5-aac9-ace1d09682a0" (UID: "711be93d-c342-44a5-aac9-ace1d09682a0"). InnerVolumeSpecName "config-out". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 10:44:24 crc kubenswrapper[4730]: I0930 10:44:24.181251 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/711be93d-c342-44a5-aac9-ace1d09682a0-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d" (OuterVolumeSpecName: "web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d") pod "711be93d-c342-44a5-aac9-ace1d09682a0" (UID: "711be93d-c342-44a5-aac9-ace1d09682a0"). InnerVolumeSpecName "web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:44:24 crc kubenswrapper[4730]: I0930 10:44:24.181394 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/711be93d-c342-44a5-aac9-ace1d09682a0-thanos-prometheus-http-client-file" (OuterVolumeSpecName: "thanos-prometheus-http-client-file") pod "711be93d-c342-44a5-aac9-ace1d09682a0" (UID: "711be93d-c342-44a5-aac9-ace1d09682a0"). InnerVolumeSpecName "thanos-prometheus-http-client-file". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:44:24 crc kubenswrapper[4730]: I0930 10:44:24.197142 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/711be93d-c342-44a5-aac9-ace1d09682a0-config" (OuterVolumeSpecName: "config") pod "711be93d-c342-44a5-aac9-ace1d09682a0" (UID: "711be93d-c342-44a5-aac9-ace1d09682a0"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:44:24 crc kubenswrapper[4730]: I0930 10:44:24.231527 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-5d620c66-5a11-463c-a9e7-c12e856084b2" (OuterVolumeSpecName: "prometheus-metric-storage-db") pod "711be93d-c342-44a5-aac9-ace1d09682a0" (UID: "711be93d-c342-44a5-aac9-ace1d09682a0"). InnerVolumeSpecName "pvc-5d620c66-5a11-463c-a9e7-c12e856084b2". 
PluginName "kubernetes.io/csi", VolumeGidValue "" Sep 30 10:44:24 crc kubenswrapper[4730]: I0930 10:44:24.271255 4730 reconciler_common.go:293] "Volume detached for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/711be93d-c342-44a5-aac9-ace1d09682a0-secret-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 10:44:24 crc kubenswrapper[4730]: I0930 10:44:24.271305 4730 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-5d620c66-5a11-463c-a9e7-c12e856084b2\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-5d620c66-5a11-463c-a9e7-c12e856084b2\") on node \"crc\" " Sep 30 10:44:24 crc kubenswrapper[4730]: I0930 10:44:24.271320 4730 reconciler_common.go:293] "Volume detached for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/711be93d-c342-44a5-aac9-ace1d09682a0-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") on node \"crc\" DevicePath \"\"" Sep 30 10:44:24 crc kubenswrapper[4730]: I0930 10:44:24.271330 4730 reconciler_common.go:293] "Volume detached for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/711be93d-c342-44a5-aac9-ace1d09682a0-thanos-prometheus-http-client-file\") on node \"crc\" DevicePath \"\"" Sep 30 10:44:24 crc kubenswrapper[4730]: I0930 10:44:24.271340 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6dqhg\" (UniqueName: \"kubernetes.io/projected/711be93d-c342-44a5-aac9-ace1d09682a0-kube-api-access-6dqhg\") on node \"crc\" DevicePath \"\"" Sep 30 10:44:24 crc kubenswrapper[4730]: I0930 10:44:24.271348 4730 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/711be93d-c342-44a5-aac9-ace1d09682a0-config\") on node \"crc\" DevicePath \"\"" Sep 30 10:44:24 crc kubenswrapper[4730]: I0930 10:44:24.271356 4730 reconciler_common.go:293] "Volume detached for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/711be93d-c342-44a5-aac9-ace1d09682a0-prometheus-metric-storage-rulefiles-0\") on node \"crc\" DevicePath \"\"" Sep 30 10:44:24 crc kubenswrapper[4730]: I0930 10:44:24.271373 4730 reconciler_common.go:293] "Volume detached for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/711be93d-c342-44a5-aac9-ace1d09682a0-config-out\") on node \"crc\" DevicePath \"\"" Sep 30 10:44:24 crc kubenswrapper[4730]: I0930 10:44:24.271384 4730 reconciler_common.go:293] "Volume detached for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/711be93d-c342-44a5-aac9-ace1d09682a0-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") on node \"crc\" DevicePath \"\"" Sep 30 10:44:24 crc kubenswrapper[4730]: I0930 10:44:24.271393 4730 reconciler_common.go:293] "Volume detached for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/711be93d-c342-44a5-aac9-ace1d09682a0-tls-assets\") on node \"crc\" DevicePath \"\"" Sep 30 10:44:24 crc kubenswrapper[4730]: I0930 10:44:24.289320 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/711be93d-c342-44a5-aac9-ace1d09682a0-web-config" (OuterVolumeSpecName: "web-config") pod "711be93d-c342-44a5-aac9-ace1d09682a0" (UID: "711be93d-c342-44a5-aac9-ace1d09682a0"). InnerVolumeSpecName "web-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:44:24 crc kubenswrapper[4730]: I0930 10:44:24.324115 4730 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... Sep 30 10:44:24 crc kubenswrapper[4730]: I0930 10:44:24.324475 4730 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-5d620c66-5a11-463c-a9e7-c12e856084b2" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-5d620c66-5a11-463c-a9e7-c12e856084b2") on node "crc" Sep 30 10:44:24 crc kubenswrapper[4730]: I0930 10:44:24.373217 4730 reconciler_common.go:293] "Volume detached for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/711be93d-c342-44a5-aac9-ace1d09682a0-web-config\") on node \"crc\" DevicePath \"\"" Sep 30 10:44:24 crc kubenswrapper[4730]: I0930 10:44:24.373253 4730 reconciler_common.go:293] "Volume detached for volume \"pvc-5d620c66-5a11-463c-a9e7-c12e856084b2\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-5d620c66-5a11-463c-a9e7-c12e856084b2\") on node \"crc\" DevicePath \"\"" Sep 30 10:44:24 crc kubenswrapper[4730]: I0930 10:44:24.764372 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"711be93d-c342-44a5-aac9-ace1d09682a0","Type":"ContainerDied","Data":"be460ee00e5e6131357dc0d1b42ce99d4d9caee73f3d5512358485f4db656511"} Sep 30 10:44:24 crc kubenswrapper[4730]: I0930 10:44:24.764423 4730 scope.go:117] "RemoveContainer" containerID="af16d88b26372b7af8fd00163d08b8dcabdf539a068578c7c5ab22e51ec68665" Sep 30 10:44:24 crc kubenswrapper[4730]: I0930 10:44:24.764975 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0" Sep 30 10:44:24 crc kubenswrapper[4730]: I0930 10:44:24.797930 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/prometheus-metric-storage-0"] Sep 30 10:44:24 crc kubenswrapper[4730]: I0930 10:44:24.798114 4730 scope.go:117] "RemoveContainer" containerID="efae9502c6e2b8bf6458b3ffeccb24c8a6b267c7cff07ffe3c4a6e19b8b4bc04" Sep 30 10:44:24 crc kubenswrapper[4730]: I0930 10:44:24.805223 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/prometheus-metric-storage-0"] Sep 30 10:44:24 crc kubenswrapper[4730]: I0930 10:44:24.831270 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/prometheus-metric-storage-0"] Sep 30 10:44:24 crc kubenswrapper[4730]: E0930 10:44:24.831646 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e4cdc0d-7159-473c-b832-8628f59a1ebb" containerName="horizon" Sep 30 10:44:24 crc kubenswrapper[4730]: I0930 10:44:24.831665 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e4cdc0d-7159-473c-b832-8628f59a1ebb" containerName="horizon" Sep 30 10:44:24 crc kubenswrapper[4730]: E0930 10:44:24.831679 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e4cdc0d-7159-473c-b832-8628f59a1ebb" containerName="horizon-log" Sep 30 10:44:24 crc kubenswrapper[4730]: I0930 10:44:24.831687 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e4cdc0d-7159-473c-b832-8628f59a1ebb" containerName="horizon-log" Sep 30 10:44:24 crc kubenswrapper[4730]: E0930 10:44:24.831711 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="711be93d-c342-44a5-aac9-ace1d09682a0" containerName="init-config-reloader" Sep 30 10:44:24 crc kubenswrapper[4730]: I0930 10:44:24.831719 4730 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="711be93d-c342-44a5-aac9-ace1d09682a0" containerName="init-config-reloader" Sep 30 10:44:24 crc kubenswrapper[4730]: E0930 10:44:24.831726 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d563cf1f-fec6-4264-986f-61763e37c786" containerName="horizon-log" Sep 30 10:44:24 crc kubenswrapper[4730]: I0930 10:44:24.831734 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="d563cf1f-fec6-4264-986f-61763e37c786" containerName="horizon-log" Sep 30 10:44:24 crc kubenswrapper[4730]: E0930 10:44:24.831749 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="711be93d-c342-44a5-aac9-ace1d09682a0" containerName="prometheus" Sep 30 10:44:24 crc kubenswrapper[4730]: I0930 10:44:24.831755 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="711be93d-c342-44a5-aac9-ace1d09682a0" containerName="prometheus" Sep 30 10:44:24 crc kubenswrapper[4730]: E0930 10:44:24.831766 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="711be93d-c342-44a5-aac9-ace1d09682a0" containerName="thanos-sidecar" Sep 30 10:44:24 crc kubenswrapper[4730]: I0930 10:44:24.831771 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="711be93d-c342-44a5-aac9-ace1d09682a0" containerName="thanos-sidecar" Sep 30 10:44:24 crc kubenswrapper[4730]: E0930 10:44:24.831786 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="13559e77-1b10-43eb-af92-407513986ad3" containerName="horizon-log" Sep 30 10:44:24 crc kubenswrapper[4730]: I0930 10:44:24.831791 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="13559e77-1b10-43eb-af92-407513986ad3" containerName="horizon-log" Sep 30 10:44:24 crc kubenswrapper[4730]: E0930 10:44:24.831799 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="13559e77-1b10-43eb-af92-407513986ad3" containerName="horizon" Sep 30 10:44:24 crc kubenswrapper[4730]: I0930 10:44:24.831804 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="13559e77-1b10-43eb-af92-407513986ad3" containerName="horizon" Sep 30 10:44:24 crc kubenswrapper[4730]: E0930 10:44:24.831815 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="711be93d-c342-44a5-aac9-ace1d09682a0" containerName="config-reloader" Sep 30 10:44:24 crc kubenswrapper[4730]: I0930 10:44:24.831821 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="711be93d-c342-44a5-aac9-ace1d09682a0" containerName="config-reloader" Sep 30 10:44:24 crc kubenswrapper[4730]: E0930 10:44:24.831829 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d563cf1f-fec6-4264-986f-61763e37c786" containerName="horizon" Sep 30 10:44:24 crc kubenswrapper[4730]: I0930 10:44:24.831837 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="d563cf1f-fec6-4264-986f-61763e37c786" containerName="horizon" Sep 30 10:44:24 crc kubenswrapper[4730]: I0930 10:44:24.832011 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="711be93d-c342-44a5-aac9-ace1d09682a0" containerName="config-reloader" Sep 30 10:44:24 crc kubenswrapper[4730]: I0930 10:44:24.832020 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="13559e77-1b10-43eb-af92-407513986ad3" containerName="horizon" Sep 30 10:44:24 crc kubenswrapper[4730]: I0930 10:44:24.832034 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="13559e77-1b10-43eb-af92-407513986ad3" containerName="horizon-log" Sep 30 10:44:24 crc kubenswrapper[4730]: I0930 10:44:24.832043 4730 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="711be93d-c342-44a5-aac9-ace1d09682a0" containerName="prometheus" Sep 30 10:44:24 crc kubenswrapper[4730]: I0930 10:44:24.832057 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="4e4cdc0d-7159-473c-b832-8628f59a1ebb" containerName="horizon" Sep 30 10:44:24 crc kubenswrapper[4730]: I0930 10:44:24.832066 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="4e4cdc0d-7159-473c-b832-8628f59a1ebb" containerName="horizon-log" Sep 30 10:44:24 crc kubenswrapper[4730]: I0930 10:44:24.832075 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="d563cf1f-fec6-4264-986f-61763e37c786" containerName="horizon" Sep 30 10:44:24 crc kubenswrapper[4730]: I0930 10:44:24.832087 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="d563cf1f-fec6-4264-986f-61763e37c786" containerName="horizon-log" Sep 30 10:44:24 crc kubenswrapper[4730]: I0930 10:44:24.832099 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="711be93d-c342-44a5-aac9-ace1d09682a0" containerName="thanos-sidecar" Sep 30 10:44:24 crc kubenswrapper[4730]: I0930 10:44:24.833892 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0" Sep 30 10:44:24 crc kubenswrapper[4730]: I0930 10:44:24.835722 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage" Sep 30 10:44:24 crc kubenswrapper[4730]: I0930 10:44:24.835914 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"prometheus-metric-storage-rulefiles-0" Sep 30 10:44:24 crc kubenswrapper[4730]: I0930 10:44:24.836138 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-thanos-prometheus-http-client-file" Sep 30 10:44:24 crc kubenswrapper[4730]: I0930 10:44:24.836707 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"metric-storage-prometheus-dockercfg-vs854" Sep 30 10:44:24 crc kubenswrapper[4730]: I0930 10:44:24.836931 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-web-config" Sep 30 10:44:24 crc kubenswrapper[4730]: I0930 10:44:24.859161 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-tls-assets-0" Sep 30 10:44:24 crc kubenswrapper[4730]: I0930 10:44:24.876994 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"] Sep 30 10:44:24 crc kubenswrapper[4730]: I0930 10:44:24.877210 4730 scope.go:117] "RemoveContainer" containerID="a561ef0dd4d1e16e5f8d236ce3741951e8ea4002afbc19febbe41202eecc609d" Sep 30 10:44:24 crc kubenswrapper[4730]: I0930 10:44:24.904862 4730 scope.go:117] "RemoveContainer" containerID="b12066acb5d61484a5a6d994e6136a6fb5d60df22d47239fe02e68f5b58fdfaf" Sep 30 10:44:24 crc kubenswrapper[4730]: I0930 10:44:24.984918 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-5d620c66-5a11-463c-a9e7-c12e856084b2\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-5d620c66-5a11-463c-a9e7-c12e856084b2\") pod \"prometheus-metric-storage-0\" (UID: \"aae3dec7-c6e3-4bd3-ad48-96d4d959d228\") " pod="openstack/prometheus-metric-storage-0" Sep 30 10:44:24 crc kubenswrapper[4730]: I0930 10:44:24.984955 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: 
\"kubernetes.io/empty-dir/aae3dec7-c6e3-4bd3-ad48-96d4d959d228-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"aae3dec7-c6e3-4bd3-ad48-96d4d959d228\") " pod="openstack/prometheus-metric-storage-0" Sep 30 10:44:24 crc kubenswrapper[4730]: I0930 10:44:24.984981 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/aae3dec7-c6e3-4bd3-ad48-96d4d959d228-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"aae3dec7-c6e3-4bd3-ad48-96d4d959d228\") " pod="openstack/prometheus-metric-storage-0" Sep 30 10:44:24 crc kubenswrapper[4730]: I0930 10:44:24.985012 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/aae3dec7-c6e3-4bd3-ad48-96d4d959d228-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"aae3dec7-c6e3-4bd3-ad48-96d4d959d228\") " pod="openstack/prometheus-metric-storage-0" Sep 30 10:44:24 crc kubenswrapper[4730]: I0930 10:44:24.985029 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/aae3dec7-c6e3-4bd3-ad48-96d4d959d228-config\") pod \"prometheus-metric-storage-0\" (UID: \"aae3dec7-c6e3-4bd3-ad48-96d4d959d228\") " pod="openstack/prometheus-metric-storage-0" Sep 30 10:44:24 crc kubenswrapper[4730]: I0930 10:44:24.985043 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/aae3dec7-c6e3-4bd3-ad48-96d4d959d228-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"aae3dec7-c6e3-4bd3-ad48-96d4d959d228\") " pod="openstack/prometheus-metric-storage-0" Sep 30 10:44:24 crc kubenswrapper[4730]: I0930 10:44:24.985069 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qqlff\" (UniqueName: \"kubernetes.io/projected/aae3dec7-c6e3-4bd3-ad48-96d4d959d228-kube-api-access-qqlff\") pod \"prometheus-metric-storage-0\" (UID: \"aae3dec7-c6e3-4bd3-ad48-96d4d959d228\") " pod="openstack/prometheus-metric-storage-0" Sep 30 10:44:24 crc kubenswrapper[4730]: I0930 10:44:24.985100 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aae3dec7-c6e3-4bd3-ad48-96d4d959d228-secret-combined-ca-bundle\") pod \"prometheus-metric-storage-0\" (UID: \"aae3dec7-c6e3-4bd3-ad48-96d4d959d228\") " pod="openstack/prometheus-metric-storage-0" Sep 30 10:44:24 crc kubenswrapper[4730]: I0930 10:44:24.985159 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/aae3dec7-c6e3-4bd3-ad48-96d4d959d228-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"aae3dec7-c6e3-4bd3-ad48-96d4d959d228\") " pod="openstack/prometheus-metric-storage-0" Sep 30 10:44:24 crc kubenswrapper[4730]: I0930 10:44:24.985236 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/aae3dec7-c6e3-4bd3-ad48-96d4d959d228-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: 
\"aae3dec7-c6e3-4bd3-ad48-96d4d959d228\") " pod="openstack/prometheus-metric-storage-0" Sep 30 10:44:24 crc kubenswrapper[4730]: I0930 10:44:24.985267 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/aae3dec7-c6e3-4bd3-ad48-96d4d959d228-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"aae3dec7-c6e3-4bd3-ad48-96d4d959d228\") " pod="openstack/prometheus-metric-storage-0" Sep 30 10:44:25 crc kubenswrapper[4730]: I0930 10:44:25.088134 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/aae3dec7-c6e3-4bd3-ad48-96d4d959d228-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"aae3dec7-c6e3-4bd3-ad48-96d4d959d228\") " pod="openstack/prometheus-metric-storage-0" Sep 30 10:44:25 crc kubenswrapper[4730]: I0930 10:44:25.088171 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/aae3dec7-c6e3-4bd3-ad48-96d4d959d228-config\") pod \"prometheus-metric-storage-0\" (UID: \"aae3dec7-c6e3-4bd3-ad48-96d4d959d228\") " pod="openstack/prometheus-metric-storage-0" Sep 30 10:44:25 crc kubenswrapper[4730]: I0930 10:44:25.088188 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/aae3dec7-c6e3-4bd3-ad48-96d4d959d228-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"aae3dec7-c6e3-4bd3-ad48-96d4d959d228\") " pod="openstack/prometheus-metric-storage-0" Sep 30 10:44:25 crc kubenswrapper[4730]: I0930 10:44:25.088219 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qqlff\" (UniqueName: \"kubernetes.io/projected/aae3dec7-c6e3-4bd3-ad48-96d4d959d228-kube-api-access-qqlff\") pod \"prometheus-metric-storage-0\" (UID: \"aae3dec7-c6e3-4bd3-ad48-96d4d959d228\") " pod="openstack/prometheus-metric-storage-0" Sep 30 10:44:25 crc kubenswrapper[4730]: I0930 10:44:25.088252 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aae3dec7-c6e3-4bd3-ad48-96d4d959d228-secret-combined-ca-bundle\") pod \"prometheus-metric-storage-0\" (UID: \"aae3dec7-c6e3-4bd3-ad48-96d4d959d228\") " pod="openstack/prometheus-metric-storage-0" Sep 30 10:44:25 crc kubenswrapper[4730]: I0930 10:44:25.088313 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/aae3dec7-c6e3-4bd3-ad48-96d4d959d228-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"aae3dec7-c6e3-4bd3-ad48-96d4d959d228\") " pod="openstack/prometheus-metric-storage-0" Sep 30 10:44:25 crc kubenswrapper[4730]: I0930 10:44:25.088392 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/aae3dec7-c6e3-4bd3-ad48-96d4d959d228-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"aae3dec7-c6e3-4bd3-ad48-96d4d959d228\") " pod="openstack/prometheus-metric-storage-0" Sep 30 10:44:25 crc kubenswrapper[4730]: I0930 10:44:25.088423 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/aae3dec7-c6e3-4bd3-ad48-96d4d959d228-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"aae3dec7-c6e3-4bd3-ad48-96d4d959d228\") " pod="openstack/prometheus-metric-storage-0" Sep 30 10:44:25 crc kubenswrapper[4730]: I0930 10:44:25.088447 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-5d620c66-5a11-463c-a9e7-c12e856084b2\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-5d620c66-5a11-463c-a9e7-c12e856084b2\") pod \"prometheus-metric-storage-0\" (UID: \"aae3dec7-c6e3-4bd3-ad48-96d4d959d228\") " pod="openstack/prometheus-metric-storage-0" Sep 30 10:44:25 crc kubenswrapper[4730]: I0930 10:44:25.088463 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/aae3dec7-c6e3-4bd3-ad48-96d4d959d228-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"aae3dec7-c6e3-4bd3-ad48-96d4d959d228\") " pod="openstack/prometheus-metric-storage-0" Sep 30 10:44:25 crc kubenswrapper[4730]: I0930 10:44:25.088481 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/aae3dec7-c6e3-4bd3-ad48-96d4d959d228-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"aae3dec7-c6e3-4bd3-ad48-96d4d959d228\") " pod="openstack/prometheus-metric-storage-0" Sep 30 10:44:25 crc kubenswrapper[4730]: I0930 10:44:25.088932 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/aae3dec7-c6e3-4bd3-ad48-96d4d959d228-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"aae3dec7-c6e3-4bd3-ad48-96d4d959d228\") " pod="openstack/prometheus-metric-storage-0" Sep 30 10:44:25 crc kubenswrapper[4730]: I0930 10:44:25.094335 4730 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Sep 30 10:44:25 crc kubenswrapper[4730]: I0930 10:44:25.094385 4730 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-5d620c66-5a11-463c-a9e7-c12e856084b2\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-5d620c66-5a11-463c-a9e7-c12e856084b2\") pod \"prometheus-metric-storage-0\" (UID: \"aae3dec7-c6e3-4bd3-ad48-96d4d959d228\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/cf539b04350c5d85f90d0468a4b3f4f72d24a709bb4a2121a25d26c6e8fc960c/globalmount\"" pod="openstack/prometheus-metric-storage-0" Sep 30 10:44:25 crc kubenswrapper[4730]: I0930 10:44:25.097112 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aae3dec7-c6e3-4bd3-ad48-96d4d959d228-secret-combined-ca-bundle\") pod \"prometheus-metric-storage-0\" (UID: \"aae3dec7-c6e3-4bd3-ad48-96d4d959d228\") " pod="openstack/prometheus-metric-storage-0" Sep 30 10:44:25 crc kubenswrapper[4730]: I0930 10:44:25.098557 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/aae3dec7-c6e3-4bd3-ad48-96d4d959d228-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"aae3dec7-c6e3-4bd3-ad48-96d4d959d228\") " pod="openstack/prometheus-metric-storage-0" Sep 30 10:44:25 crc kubenswrapper[4730]: I0930 10:44:25.098722 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/aae3dec7-c6e3-4bd3-ad48-96d4d959d228-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"aae3dec7-c6e3-4bd3-ad48-96d4d959d228\") " pod="openstack/prometheus-metric-storage-0" Sep 30 10:44:25 crc kubenswrapper[4730]: I0930 10:44:25.100138 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/aae3dec7-c6e3-4bd3-ad48-96d4d959d228-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"aae3dec7-c6e3-4bd3-ad48-96d4d959d228\") " pod="openstack/prometheus-metric-storage-0" Sep 30 10:44:25 crc kubenswrapper[4730]: I0930 10:44:25.100487 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/aae3dec7-c6e3-4bd3-ad48-96d4d959d228-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"aae3dec7-c6e3-4bd3-ad48-96d4d959d228\") " pod="openstack/prometheus-metric-storage-0" Sep 30 10:44:25 crc kubenswrapper[4730]: I0930 10:44:25.118658 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/aae3dec7-c6e3-4bd3-ad48-96d4d959d228-config\") pod \"prometheus-metric-storage-0\" (UID: \"aae3dec7-c6e3-4bd3-ad48-96d4d959d228\") " pod="openstack/prometheus-metric-storage-0" Sep 30 10:44:25 crc kubenswrapper[4730]: I0930 10:44:25.118864 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/aae3dec7-c6e3-4bd3-ad48-96d4d959d228-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"aae3dec7-c6e3-4bd3-ad48-96d4d959d228\") " pod="openstack/prometheus-metric-storage-0" Sep 30 10:44:25 crc kubenswrapper[4730]: I0930 10:44:25.120344 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/aae3dec7-c6e3-4bd3-ad48-96d4d959d228-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"aae3dec7-c6e3-4bd3-ad48-96d4d959d228\") " pod="openstack/prometheus-metric-storage-0" Sep 30 10:44:25 crc kubenswrapper[4730]: I0930 10:44:25.121834 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qqlff\" (UniqueName: \"kubernetes.io/projected/aae3dec7-c6e3-4bd3-ad48-96d4d959d228-kube-api-access-qqlff\") pod \"prometheus-metric-storage-0\" (UID: \"aae3dec7-c6e3-4bd3-ad48-96d4d959d228\") " pod="openstack/prometheus-metric-storage-0" Sep 30 10:44:25 crc kubenswrapper[4730]: I0930 10:44:25.154573 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-5d620c66-5a11-463c-a9e7-c12e856084b2\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-5d620c66-5a11-463c-a9e7-c12e856084b2\") pod \"prometheus-metric-storage-0\" (UID: \"aae3dec7-c6e3-4bd3-ad48-96d4d959d228\") " pod="openstack/prometheus-metric-storage-0" Sep 30 10:44:25 crc kubenswrapper[4730]: I0930 10:44:25.197259 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0" Sep 30 10:44:25 crc kubenswrapper[4730]: W0930 10:44:25.753209 4730 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podaae3dec7_c6e3_4bd3_ad48_96d4d959d228.slice/crio-e2d7a0e41d186a652a57d04c15063996db49c300cff3cb16b81e0e3d77751047 WatchSource:0}: Error finding container e2d7a0e41d186a652a57d04c15063996db49c300cff3cb16b81e0e3d77751047: Status 404 returned error can't find the container with id e2d7a0e41d186a652a57d04c15063996db49c300cff3cb16b81e0e3d77751047 Sep 30 10:44:25 crc kubenswrapper[4730]: I0930 10:44:25.765992 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"] Sep 30 10:44:25 crc kubenswrapper[4730]: I0930 10:44:25.779080 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"aae3dec7-c6e3-4bd3-ad48-96d4d959d228","Type":"ContainerStarted","Data":"e2d7a0e41d186a652a57d04c15063996db49c300cff3cb16b81e0e3d77751047"} Sep 30 10:44:26 crc kubenswrapper[4730]: I0930 10:44:26.391201 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="711be93d-c342-44a5-aac9-ace1d09682a0" path="/var/lib/kubelet/pods/711be93d-c342-44a5-aac9-ace1d09682a0/volumes" Sep 30 10:44:29 crc kubenswrapper[4730]: I0930 10:44:29.823532 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"aae3dec7-c6e3-4bd3-ad48-96d4d959d228","Type":"ContainerStarted","Data":"9435ddb3fc6e6c82090b292cfbda0ef20969b4fb332b011390e2e0b64d0f06a1"} Sep 30 10:44:32 crc kubenswrapper[4730]: I0930 10:44:32.336495 4730 patch_prober.go:28] interesting pod/machine-config-daemon-d4zf9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 10:44:32 crc kubenswrapper[4730]: I0930 10:44:32.337100 4730 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerName="machine-config-daemon" 
probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 10:44:37 crc kubenswrapper[4730]: I0930 10:44:37.894847 4730 generic.go:334] "Generic (PLEG): container finished" podID="aae3dec7-c6e3-4bd3-ad48-96d4d959d228" containerID="9435ddb3fc6e6c82090b292cfbda0ef20969b4fb332b011390e2e0b64d0f06a1" exitCode=0 Sep 30 10:44:37 crc kubenswrapper[4730]: I0930 10:44:37.895022 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"aae3dec7-c6e3-4bd3-ad48-96d4d959d228","Type":"ContainerDied","Data":"9435ddb3fc6e6c82090b292cfbda0ef20969b4fb332b011390e2e0b64d0f06a1"} Sep 30 10:44:38 crc kubenswrapper[4730]: I0930 10:44:38.907013 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"aae3dec7-c6e3-4bd3-ad48-96d4d959d228","Type":"ContainerStarted","Data":"7057144075fcda5e9676328b3a948afe4d9979451dd722f22b631b76dbe67db0"} Sep 30 10:44:42 crc kubenswrapper[4730]: I0930 10:44:42.963816 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"aae3dec7-c6e3-4bd3-ad48-96d4d959d228","Type":"ContainerStarted","Data":"c1b3a8dd90b6db03dc6989e17034df04e341317e5cbd7293601d5ed9ac3c2543"} Sep 30 10:44:42 crc kubenswrapper[4730]: I0930 10:44:42.964436 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"aae3dec7-c6e3-4bd3-ad48-96d4d959d228","Type":"ContainerStarted","Data":"d68dde0fc612ed08755deb5cf58ee9d1715d6baa6019744207c86b00c3816a77"} Sep 30 10:44:43 crc kubenswrapper[4730]: I0930 10:44:43.001825 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/prometheus-metric-storage-0" podStartSLOduration=19.001802586 podStartE2EDuration="19.001802586s" podCreationTimestamp="2025-09-30 10:44:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 10:44:42.992639128 +0000 UTC m=+3327.325899161" watchObservedRunningTime="2025-09-30 10:44:43.001802586 +0000 UTC m=+3327.335062589" Sep 30 10:44:45 crc kubenswrapper[4730]: I0930 10:44:45.198366 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/prometheus-metric-storage-0" Sep 30 10:44:55 crc kubenswrapper[4730]: I0930 10:44:55.198257 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/prometheus-metric-storage-0" Sep 30 10:44:55 crc kubenswrapper[4730]: I0930 10:44:55.207362 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/prometheus-metric-storage-0" Sep 30 10:44:56 crc kubenswrapper[4730]: I0930 10:44:56.097049 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/prometheus-metric-storage-0" Sep 30 10:45:00 crc kubenswrapper[4730]: I0930 10:45:00.176694 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29320485-nlf74"] Sep 30 10:45:00 crc kubenswrapper[4730]: I0930 10:45:00.178526 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29320485-nlf74" Sep 30 10:45:00 crc kubenswrapper[4730]: I0930 10:45:00.180595 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Sep 30 10:45:00 crc kubenswrapper[4730]: I0930 10:45:00.180641 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Sep 30 10:45:00 crc kubenswrapper[4730]: I0930 10:45:00.188366 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29320485-nlf74"] Sep 30 10:45:00 crc kubenswrapper[4730]: I0930 10:45:00.235683 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/90e7dbe7-4eee-4da1-9b22-5d290321a79b-secret-volume\") pod \"collect-profiles-29320485-nlf74\" (UID: \"90e7dbe7-4eee-4da1-9b22-5d290321a79b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320485-nlf74" Sep 30 10:45:00 crc kubenswrapper[4730]: I0930 10:45:00.235773 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/90e7dbe7-4eee-4da1-9b22-5d290321a79b-config-volume\") pod \"collect-profiles-29320485-nlf74\" (UID: \"90e7dbe7-4eee-4da1-9b22-5d290321a79b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320485-nlf74" Sep 30 10:45:00 crc kubenswrapper[4730]: I0930 10:45:00.235800 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w5hp8\" (UniqueName: \"kubernetes.io/projected/90e7dbe7-4eee-4da1-9b22-5d290321a79b-kube-api-access-w5hp8\") pod \"collect-profiles-29320485-nlf74\" (UID: \"90e7dbe7-4eee-4da1-9b22-5d290321a79b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320485-nlf74" Sep 30 10:45:00 crc kubenswrapper[4730]: I0930 10:45:00.337149 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w5hp8\" (UniqueName: \"kubernetes.io/projected/90e7dbe7-4eee-4da1-9b22-5d290321a79b-kube-api-access-w5hp8\") pod \"collect-profiles-29320485-nlf74\" (UID: \"90e7dbe7-4eee-4da1-9b22-5d290321a79b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320485-nlf74" Sep 30 10:45:00 crc kubenswrapper[4730]: I0930 10:45:00.337338 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/90e7dbe7-4eee-4da1-9b22-5d290321a79b-secret-volume\") pod \"collect-profiles-29320485-nlf74\" (UID: \"90e7dbe7-4eee-4da1-9b22-5d290321a79b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320485-nlf74" Sep 30 10:45:00 crc kubenswrapper[4730]: I0930 10:45:00.337402 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/90e7dbe7-4eee-4da1-9b22-5d290321a79b-config-volume\") pod \"collect-profiles-29320485-nlf74\" (UID: \"90e7dbe7-4eee-4da1-9b22-5d290321a79b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320485-nlf74" Sep 30 10:45:00 crc kubenswrapper[4730]: I0930 10:45:00.338603 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/90e7dbe7-4eee-4da1-9b22-5d290321a79b-config-volume\") pod 
\"collect-profiles-29320485-nlf74\" (UID: \"90e7dbe7-4eee-4da1-9b22-5d290321a79b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320485-nlf74" Sep 30 10:45:00 crc kubenswrapper[4730]: I0930 10:45:00.344010 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/90e7dbe7-4eee-4da1-9b22-5d290321a79b-secret-volume\") pod \"collect-profiles-29320485-nlf74\" (UID: \"90e7dbe7-4eee-4da1-9b22-5d290321a79b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320485-nlf74" Sep 30 10:45:00 crc kubenswrapper[4730]: I0930 10:45:00.356196 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w5hp8\" (UniqueName: \"kubernetes.io/projected/90e7dbe7-4eee-4da1-9b22-5d290321a79b-kube-api-access-w5hp8\") pod \"collect-profiles-29320485-nlf74\" (UID: \"90e7dbe7-4eee-4da1-9b22-5d290321a79b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320485-nlf74" Sep 30 10:45:00 crc kubenswrapper[4730]: I0930 10:45:00.497430 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29320485-nlf74" Sep 30 10:45:00 crc kubenswrapper[4730]: I0930 10:45:00.952334 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29320485-nlf74"] Sep 30 10:45:01 crc kubenswrapper[4730]: I0930 10:45:01.141517 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29320485-nlf74" event={"ID":"90e7dbe7-4eee-4da1-9b22-5d290321a79b","Type":"ContainerStarted","Data":"8697daaa29d53d4fb75aa2dd164dc9f933e98e2f64431bdbdaa620bb44691672"} Sep 30 10:45:01 crc kubenswrapper[4730]: I0930 10:45:01.141570 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29320485-nlf74" event={"ID":"90e7dbe7-4eee-4da1-9b22-5d290321a79b","Type":"ContainerStarted","Data":"ddc26c7bc0dd94a91e5da4d8742b8e8bd4de4a8416cade7af26fa1be2be28986"} Sep 30 10:45:02 crc kubenswrapper[4730]: I0930 10:45:02.152157 4730 generic.go:334] "Generic (PLEG): container finished" podID="90e7dbe7-4eee-4da1-9b22-5d290321a79b" containerID="8697daaa29d53d4fb75aa2dd164dc9f933e98e2f64431bdbdaa620bb44691672" exitCode=0 Sep 30 10:45:02 crc kubenswrapper[4730]: I0930 10:45:02.152204 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29320485-nlf74" event={"ID":"90e7dbe7-4eee-4da1-9b22-5d290321a79b","Type":"ContainerDied","Data":"8697daaa29d53d4fb75aa2dd164dc9f933e98e2f64431bdbdaa620bb44691672"} Sep 30 10:45:02 crc kubenswrapper[4730]: I0930 10:45:02.336820 4730 patch_prober.go:28] interesting pod/machine-config-daemon-d4zf9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 10:45:02 crc kubenswrapper[4730]: I0930 10:45:02.336889 4730 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 10:45:03 crc kubenswrapper[4730]: I0930 10:45:03.555092 4730 util.go:48] "No ready sandbox for pod can be 
found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29320485-nlf74" Sep 30 10:45:03 crc kubenswrapper[4730]: I0930 10:45:03.604833 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/90e7dbe7-4eee-4da1-9b22-5d290321a79b-config-volume\") pod \"90e7dbe7-4eee-4da1-9b22-5d290321a79b\" (UID: \"90e7dbe7-4eee-4da1-9b22-5d290321a79b\") " Sep 30 10:45:03 crc kubenswrapper[4730]: I0930 10:45:03.605055 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w5hp8\" (UniqueName: \"kubernetes.io/projected/90e7dbe7-4eee-4da1-9b22-5d290321a79b-kube-api-access-w5hp8\") pod \"90e7dbe7-4eee-4da1-9b22-5d290321a79b\" (UID: \"90e7dbe7-4eee-4da1-9b22-5d290321a79b\") " Sep 30 10:45:03 crc kubenswrapper[4730]: I0930 10:45:03.605162 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/90e7dbe7-4eee-4da1-9b22-5d290321a79b-secret-volume\") pod \"90e7dbe7-4eee-4da1-9b22-5d290321a79b\" (UID: \"90e7dbe7-4eee-4da1-9b22-5d290321a79b\") " Sep 30 10:45:03 crc kubenswrapper[4730]: I0930 10:45:03.605759 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/90e7dbe7-4eee-4da1-9b22-5d290321a79b-config-volume" (OuterVolumeSpecName: "config-volume") pod "90e7dbe7-4eee-4da1-9b22-5d290321a79b" (UID: "90e7dbe7-4eee-4da1-9b22-5d290321a79b"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 10:45:03 crc kubenswrapper[4730]: I0930 10:45:03.606096 4730 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/90e7dbe7-4eee-4da1-9b22-5d290321a79b-config-volume\") on node \"crc\" DevicePath \"\"" Sep 30 10:45:03 crc kubenswrapper[4730]: I0930 10:45:03.614844 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/90e7dbe7-4eee-4da1-9b22-5d290321a79b-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "90e7dbe7-4eee-4da1-9b22-5d290321a79b" (UID: "90e7dbe7-4eee-4da1-9b22-5d290321a79b"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 10:45:03 crc kubenswrapper[4730]: I0930 10:45:03.616003 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/90e7dbe7-4eee-4da1-9b22-5d290321a79b-kube-api-access-w5hp8" (OuterVolumeSpecName: "kube-api-access-w5hp8") pod "90e7dbe7-4eee-4da1-9b22-5d290321a79b" (UID: "90e7dbe7-4eee-4da1-9b22-5d290321a79b"). InnerVolumeSpecName "kube-api-access-w5hp8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:45:03 crc kubenswrapper[4730]: I0930 10:45:03.708093 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w5hp8\" (UniqueName: \"kubernetes.io/projected/90e7dbe7-4eee-4da1-9b22-5d290321a79b-kube-api-access-w5hp8\") on node \"crc\" DevicePath \"\"" Sep 30 10:45:03 crc kubenswrapper[4730]: I0930 10:45:03.708125 4730 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/90e7dbe7-4eee-4da1-9b22-5d290321a79b-secret-volume\") on node \"crc\" DevicePath \"\"" Sep 30 10:45:04 crc kubenswrapper[4730]: I0930 10:45:04.171579 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29320485-nlf74" event={"ID":"90e7dbe7-4eee-4da1-9b22-5d290321a79b","Type":"ContainerDied","Data":"ddc26c7bc0dd94a91e5da4d8742b8e8bd4de4a8416cade7af26fa1be2be28986"} Sep 30 10:45:04 crc kubenswrapper[4730]: I0930 10:45:04.171844 4730 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ddc26c7bc0dd94a91e5da4d8742b8e8bd4de4a8416cade7af26fa1be2be28986" Sep 30 10:45:04 crc kubenswrapper[4730]: I0930 10:45:04.171681 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29320485-nlf74" Sep 30 10:45:04 crc kubenswrapper[4730]: I0930 10:45:04.642843 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29320440-ddpz9"] Sep 30 10:45:04 crc kubenswrapper[4730]: I0930 10:45:04.653266 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29320440-ddpz9"] Sep 30 10:45:06 crc kubenswrapper[4730]: I0930 10:45:06.396417 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ec8158aa-12b3-457a-9ad3-7c7da7819f1c" path="/var/lib/kubelet/pods/ec8158aa-12b3-457a-9ad3-7c7da7819f1c/volumes" Sep 30 10:45:20 crc kubenswrapper[4730]: I0930 10:45:20.200430 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/tempest-tests-tempest"] Sep 30 10:45:20 crc kubenswrapper[4730]: E0930 10:45:20.201606 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="90e7dbe7-4eee-4da1-9b22-5d290321a79b" containerName="collect-profiles" Sep 30 10:45:20 crc kubenswrapper[4730]: I0930 10:45:20.201656 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="90e7dbe7-4eee-4da1-9b22-5d290321a79b" containerName="collect-profiles" Sep 30 10:45:20 crc kubenswrapper[4730]: I0930 10:45:20.201929 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="90e7dbe7-4eee-4da1-9b22-5d290321a79b" containerName="collect-profiles" Sep 30 10:45:20 crc kubenswrapper[4730]: I0930 10:45:20.202854 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/tempest-tests-tempest" Sep 30 10:45:20 crc kubenswrapper[4730]: I0930 10:45:20.209027 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"test-operator-controller-priv-key" Sep 30 10:45:20 crc kubenswrapper[4730]: I0930 10:45:20.209654 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-custom-data-s0" Sep 30 10:45:20 crc kubenswrapper[4730]: I0930 10:45:20.209714 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-env-vars-s0" Sep 30 10:45:20 crc kubenswrapper[4730]: I0930 10:45:20.209658 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-thzbw" Sep 30 10:45:20 crc kubenswrapper[4730]: I0930 10:45:20.216119 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"] Sep 30 10:45:20 crc kubenswrapper[4730]: I0930 10:45:20.369536 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/5d6d300c-5857-4de1-8317-cded656bc61e-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"5d6d300c-5857-4de1-8317-cded656bc61e\") " pod="openstack/tempest-tests-tempest" Sep 30 10:45:20 crc kubenswrapper[4730]: I0930 10:45:20.369950 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/5d6d300c-5857-4de1-8317-cded656bc61e-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"5d6d300c-5857-4de1-8317-cded656bc61e\") " pod="openstack/tempest-tests-tempest" Sep 30 10:45:20 crc kubenswrapper[4730]: I0930 10:45:20.369978 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/5d6d300c-5857-4de1-8317-cded656bc61e-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"5d6d300c-5857-4de1-8317-cded656bc61e\") " pod="openstack/tempest-tests-tempest" Sep 30 10:45:20 crc kubenswrapper[4730]: I0930 10:45:20.369995 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5d6d300c-5857-4de1-8317-cded656bc61e-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"5d6d300c-5857-4de1-8317-cded656bc61e\") " pod="openstack/tempest-tests-tempest" Sep 30 10:45:20 crc kubenswrapper[4730]: I0930 10:45:20.370031 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"tempest-tests-tempest\" (UID: \"5d6d300c-5857-4de1-8317-cded656bc61e\") " pod="openstack/tempest-tests-tempest" Sep 30 10:45:20 crc kubenswrapper[4730]: I0930 10:45:20.370112 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5d6d300c-5857-4de1-8317-cded656bc61e-config-data\") pod \"tempest-tests-tempest\" (UID: \"5d6d300c-5857-4de1-8317-cded656bc61e\") " pod="openstack/tempest-tests-tempest" Sep 30 10:45:20 crc kubenswrapper[4730]: I0930 10:45:20.370138 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dzs5v\" (UniqueName: \"kubernetes.io/projected/5d6d300c-5857-4de1-8317-cded656bc61e-kube-api-access-dzs5v\") pod 
\"tempest-tests-tempest\" (UID: \"5d6d300c-5857-4de1-8317-cded656bc61e\") " pod="openstack/tempest-tests-tempest" Sep 30 10:45:20 crc kubenswrapper[4730]: I0930 10:45:20.370179 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/5d6d300c-5857-4de1-8317-cded656bc61e-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"5d6d300c-5857-4de1-8317-cded656bc61e\") " pod="openstack/tempest-tests-tempest" Sep 30 10:45:20 crc kubenswrapper[4730]: I0930 10:45:20.370225 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/5d6d300c-5857-4de1-8317-cded656bc61e-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"5d6d300c-5857-4de1-8317-cded656bc61e\") " pod="openstack/tempest-tests-tempest" Sep 30 10:45:20 crc kubenswrapper[4730]: I0930 10:45:20.471581 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5d6d300c-5857-4de1-8317-cded656bc61e-config-data\") pod \"tempest-tests-tempest\" (UID: \"5d6d300c-5857-4de1-8317-cded656bc61e\") " pod="openstack/tempest-tests-tempest" Sep 30 10:45:20 crc kubenswrapper[4730]: I0930 10:45:20.471658 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dzs5v\" (UniqueName: \"kubernetes.io/projected/5d6d300c-5857-4de1-8317-cded656bc61e-kube-api-access-dzs5v\") pod \"tempest-tests-tempest\" (UID: \"5d6d300c-5857-4de1-8317-cded656bc61e\") " pod="openstack/tempest-tests-tempest" Sep 30 10:45:20 crc kubenswrapper[4730]: I0930 10:45:20.471711 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/5d6d300c-5857-4de1-8317-cded656bc61e-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"5d6d300c-5857-4de1-8317-cded656bc61e\") " pod="openstack/tempest-tests-tempest" Sep 30 10:45:20 crc kubenswrapper[4730]: I0930 10:45:20.471771 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/5d6d300c-5857-4de1-8317-cded656bc61e-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"5d6d300c-5857-4de1-8317-cded656bc61e\") " pod="openstack/tempest-tests-tempest" Sep 30 10:45:20 crc kubenswrapper[4730]: I0930 10:45:20.471801 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/5d6d300c-5857-4de1-8317-cded656bc61e-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"5d6d300c-5857-4de1-8317-cded656bc61e\") " pod="openstack/tempest-tests-tempest" Sep 30 10:45:20 crc kubenswrapper[4730]: I0930 10:45:20.471862 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/5d6d300c-5857-4de1-8317-cded656bc61e-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"5d6d300c-5857-4de1-8317-cded656bc61e\") " pod="openstack/tempest-tests-tempest" Sep 30 10:45:20 crc kubenswrapper[4730]: I0930 10:45:20.471881 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: 
\"kubernetes.io/secret/5d6d300c-5857-4de1-8317-cded656bc61e-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"5d6d300c-5857-4de1-8317-cded656bc61e\") " pod="openstack/tempest-tests-tempest" Sep 30 10:45:20 crc kubenswrapper[4730]: I0930 10:45:20.471897 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5d6d300c-5857-4de1-8317-cded656bc61e-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"5d6d300c-5857-4de1-8317-cded656bc61e\") " pod="openstack/tempest-tests-tempest" Sep 30 10:45:20 crc kubenswrapper[4730]: I0930 10:45:20.471941 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"tempest-tests-tempest\" (UID: \"5d6d300c-5857-4de1-8317-cded656bc61e\") " pod="openstack/tempest-tests-tempest" Sep 30 10:45:20 crc kubenswrapper[4730]: I0930 10:45:20.473225 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/5d6d300c-5857-4de1-8317-cded656bc61e-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"5d6d300c-5857-4de1-8317-cded656bc61e\") " pod="openstack/tempest-tests-tempest" Sep 30 10:45:20 crc kubenswrapper[4730]: I0930 10:45:20.473279 4730 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"tempest-tests-tempest\" (UID: \"5d6d300c-5857-4de1-8317-cded656bc61e\") device mount path \"/mnt/openstack/pv06\"" pod="openstack/tempest-tests-tempest" Sep 30 10:45:20 crc kubenswrapper[4730]: I0930 10:45:20.473815 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5d6d300c-5857-4de1-8317-cded656bc61e-config-data\") pod \"tempest-tests-tempest\" (UID: \"5d6d300c-5857-4de1-8317-cded656bc61e\") " pod="openstack/tempest-tests-tempest" Sep 30 10:45:20 crc kubenswrapper[4730]: I0930 10:45:20.473834 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/5d6d300c-5857-4de1-8317-cded656bc61e-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"5d6d300c-5857-4de1-8317-cded656bc61e\") " pod="openstack/tempest-tests-tempest" Sep 30 10:45:20 crc kubenswrapper[4730]: I0930 10:45:20.476326 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/5d6d300c-5857-4de1-8317-cded656bc61e-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"5d6d300c-5857-4de1-8317-cded656bc61e\") " pod="openstack/tempest-tests-tempest" Sep 30 10:45:20 crc kubenswrapper[4730]: I0930 10:45:20.490269 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-5c6gr"] Sep 30 10:45:20 crc kubenswrapper[4730]: I0930 10:45:20.491286 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/5d6d300c-5857-4de1-8317-cded656bc61e-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"5d6d300c-5857-4de1-8317-cded656bc61e\") " pod="openstack/tempest-tests-tempest" Sep 30 10:45:20 crc kubenswrapper[4730]: I0930 10:45:20.494366 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: 
\"kubernetes.io/secret/5d6d300c-5857-4de1-8317-cded656bc61e-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"5d6d300c-5857-4de1-8317-cded656bc61e\") " pod="openstack/tempest-tests-tempest" Sep 30 10:45:20 crc kubenswrapper[4730]: I0930 10:45:20.495055 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/5d6d300c-5857-4de1-8317-cded656bc61e-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"5d6d300c-5857-4de1-8317-cded656bc61e\") " pod="openstack/tempest-tests-tempest" Sep 30 10:45:20 crc kubenswrapper[4730]: I0930 10:45:20.495326 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-5c6gr"] Sep 30 10:45:20 crc kubenswrapper[4730]: I0930 10:45:20.495422 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-5c6gr" Sep 30 10:45:20 crc kubenswrapper[4730]: I0930 10:45:20.500700 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dzs5v\" (UniqueName: \"kubernetes.io/projected/5d6d300c-5857-4de1-8317-cded656bc61e-kube-api-access-dzs5v\") pod \"tempest-tests-tempest\" (UID: \"5d6d300c-5857-4de1-8317-cded656bc61e\") " pod="openstack/tempest-tests-tempest" Sep 30 10:45:20 crc kubenswrapper[4730]: I0930 10:45:20.516992 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"tempest-tests-tempest\" (UID: \"5d6d300c-5857-4de1-8317-cded656bc61e\") " pod="openstack/tempest-tests-tempest" Sep 30 10:45:20 crc kubenswrapper[4730]: I0930 10:45:20.538008 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Sep 30 10:45:20 crc kubenswrapper[4730]: I0930 10:45:20.573963 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/585c31ac-4577-4d75-8ac7-ab95ff1dcdd3-utilities\") pod \"community-operators-5c6gr\" (UID: \"585c31ac-4577-4d75-8ac7-ab95ff1dcdd3\") " pod="openshift-marketplace/community-operators-5c6gr" Sep 30 10:45:20 crc kubenswrapper[4730]: I0930 10:45:20.574128 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/585c31ac-4577-4d75-8ac7-ab95ff1dcdd3-catalog-content\") pod \"community-operators-5c6gr\" (UID: \"585c31ac-4577-4d75-8ac7-ab95ff1dcdd3\") " pod="openshift-marketplace/community-operators-5c6gr" Sep 30 10:45:20 crc kubenswrapper[4730]: I0930 10:45:20.574188 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wtx5r\" (UniqueName: \"kubernetes.io/projected/585c31ac-4577-4d75-8ac7-ab95ff1dcdd3-kube-api-access-wtx5r\") pod \"community-operators-5c6gr\" (UID: \"585c31ac-4577-4d75-8ac7-ab95ff1dcdd3\") " pod="openshift-marketplace/community-operators-5c6gr" Sep 30 10:45:20 crc kubenswrapper[4730]: I0930 10:45:20.676583 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/585c31ac-4577-4d75-8ac7-ab95ff1dcdd3-utilities\") pod \"community-operators-5c6gr\" (UID: \"585c31ac-4577-4d75-8ac7-ab95ff1dcdd3\") " pod="openshift-marketplace/community-operators-5c6gr" Sep 30 10:45:20 crc kubenswrapper[4730]: I0930 10:45:20.676808 4730 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/585c31ac-4577-4d75-8ac7-ab95ff1dcdd3-catalog-content\") pod \"community-operators-5c6gr\" (UID: \"585c31ac-4577-4d75-8ac7-ab95ff1dcdd3\") " pod="openshift-marketplace/community-operators-5c6gr" Sep 30 10:45:20 crc kubenswrapper[4730]: I0930 10:45:20.676858 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wtx5r\" (UniqueName: \"kubernetes.io/projected/585c31ac-4577-4d75-8ac7-ab95ff1dcdd3-kube-api-access-wtx5r\") pod \"community-operators-5c6gr\" (UID: \"585c31ac-4577-4d75-8ac7-ab95ff1dcdd3\") " pod="openshift-marketplace/community-operators-5c6gr" Sep 30 10:45:20 crc kubenswrapper[4730]: I0930 10:45:20.677329 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/585c31ac-4577-4d75-8ac7-ab95ff1dcdd3-utilities\") pod \"community-operators-5c6gr\" (UID: \"585c31ac-4577-4d75-8ac7-ab95ff1dcdd3\") " pod="openshift-marketplace/community-operators-5c6gr" Sep 30 10:45:20 crc kubenswrapper[4730]: I0930 10:45:20.677337 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/585c31ac-4577-4d75-8ac7-ab95ff1dcdd3-catalog-content\") pod \"community-operators-5c6gr\" (UID: \"585c31ac-4577-4d75-8ac7-ab95ff1dcdd3\") " pod="openshift-marketplace/community-operators-5c6gr" Sep 30 10:45:20 crc kubenswrapper[4730]: I0930 10:45:20.696839 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wtx5r\" (UniqueName: \"kubernetes.io/projected/585c31ac-4577-4d75-8ac7-ab95ff1dcdd3-kube-api-access-wtx5r\") pod \"community-operators-5c6gr\" (UID: \"585c31ac-4577-4d75-8ac7-ab95ff1dcdd3\") " pod="openshift-marketplace/community-operators-5c6gr" Sep 30 10:45:20 crc kubenswrapper[4730]: I0930 10:45:20.996004 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-5c6gr" Sep 30 10:45:21 crc kubenswrapper[4730]: I0930 10:45:21.070220 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"] Sep 30 10:45:21 crc kubenswrapper[4730]: W0930 10:45:21.077591 4730 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5d6d300c_5857_4de1_8317_cded656bc61e.slice/crio-60d2dd3529780567a657518727a20d1b4b6aca8b6472a77440f82f1077c8325f WatchSource:0}: Error finding container 60d2dd3529780567a657518727a20d1b4b6aca8b6472a77440f82f1077c8325f: Status 404 returned error can't find the container with id 60d2dd3529780567a657518727a20d1b4b6aca8b6472a77440f82f1077c8325f Sep 30 10:45:21 crc kubenswrapper[4730]: I0930 10:45:21.338524 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"5d6d300c-5857-4de1-8317-cded656bc61e","Type":"ContainerStarted","Data":"60d2dd3529780567a657518727a20d1b4b6aca8b6472a77440f82f1077c8325f"} Sep 30 10:45:21 crc kubenswrapper[4730]: I0930 10:45:21.468194 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-5c6gr"] Sep 30 10:45:22 crc kubenswrapper[4730]: I0930 10:45:22.353305 4730 generic.go:334] "Generic (PLEG): container finished" podID="585c31ac-4577-4d75-8ac7-ab95ff1dcdd3" containerID="f9a42fda014d4fa99aca47e0b4c6c45b4adeec58a1ed22fd957a509af8bd36e0" exitCode=0 Sep 30 10:45:22 crc kubenswrapper[4730]: I0930 10:45:22.353425 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5c6gr" event={"ID":"585c31ac-4577-4d75-8ac7-ab95ff1dcdd3","Type":"ContainerDied","Data":"f9a42fda014d4fa99aca47e0b4c6c45b4adeec58a1ed22fd957a509af8bd36e0"} Sep 30 10:45:22 crc kubenswrapper[4730]: I0930 10:45:22.353584 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5c6gr" event={"ID":"585c31ac-4577-4d75-8ac7-ab95ff1dcdd3","Type":"ContainerStarted","Data":"01de41181be9810c382c553ae49539b4abc50a43948bb60d7395ef24fd46d6bc"} Sep 30 10:45:26 crc kubenswrapper[4730]: I0930 10:45:26.035003 4730 scope.go:117] "RemoveContainer" containerID="3789d69ce5f0a1bb8549c278ad2732ec821cfaa4a7a02b77047ccc1636bfeae7" Sep 30 10:45:32 crc kubenswrapper[4730]: I0930 10:45:32.336462 4730 patch_prober.go:28] interesting pod/machine-config-daemon-d4zf9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 10:45:32 crc kubenswrapper[4730]: I0930 10:45:32.337072 4730 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 10:45:32 crc kubenswrapper[4730]: I0930 10:45:32.337128 4730 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" Sep 30 10:45:32 crc kubenswrapper[4730]: I0930 10:45:32.338020 4730 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" 
containerStatusID={"Type":"cri-o","ID":"356c135f70f8fbd71ae905359591919213eb62e111b8492c4fab1464016900ca"} pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 10:45:32 crc kubenswrapper[4730]: I0930 10:45:32.338088 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerName="machine-config-daemon" containerID="cri-o://356c135f70f8fbd71ae905359591919213eb62e111b8492c4fab1464016900ca" gracePeriod=600 Sep 30 10:45:32 crc kubenswrapper[4730]: I0930 10:45:32.551534 4730 generic.go:334] "Generic (PLEG): container finished" podID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerID="356c135f70f8fbd71ae905359591919213eb62e111b8492c4fab1464016900ca" exitCode=0 Sep 30 10:45:32 crc kubenswrapper[4730]: I0930 10:45:32.551879 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" event={"ID":"95bd4436-8399-478d-9552-c9ba5ae8f327","Type":"ContainerDied","Data":"356c135f70f8fbd71ae905359591919213eb62e111b8492c4fab1464016900ca"} Sep 30 10:45:32 crc kubenswrapper[4730]: I0930 10:45:32.551940 4730 scope.go:117] "RemoveContainer" containerID="7e6c79d1416bb74cb6515c7fc45c6c769987357541374ab109fdf2b032a93fd0" Sep 30 10:45:32 crc kubenswrapper[4730]: I0930 10:45:32.554368 4730 generic.go:334] "Generic (PLEG): container finished" podID="585c31ac-4577-4d75-8ac7-ab95ff1dcdd3" containerID="4754323e6ccb7e45f5b1647bc2fcd93fad32e784d43a129fe7a4fa3b95d1eb76" exitCode=0 Sep 30 10:45:32 crc kubenswrapper[4730]: I0930 10:45:32.554426 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5c6gr" event={"ID":"585c31ac-4577-4d75-8ac7-ab95ff1dcdd3","Type":"ContainerDied","Data":"4754323e6ccb7e45f5b1647bc2fcd93fad32e784d43a129fe7a4fa3b95d1eb76"} Sep 30 10:45:32 crc kubenswrapper[4730]: I0930 10:45:32.556425 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"5d6d300c-5857-4de1-8317-cded656bc61e","Type":"ContainerStarted","Data":"daab50ecd778c1b470cb438a599cf601b52b412c0492d9e4766f8c4faed80864"} Sep 30 10:45:33 crc kubenswrapper[4730]: I0930 10:45:33.569209 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" event={"ID":"95bd4436-8399-478d-9552-c9ba5ae8f327","Type":"ContainerStarted","Data":"cec435c0d3415ad30c7bde79883876150a78e02da8967aef01668d3b6363275b"} Sep 30 10:45:33 crc kubenswrapper[4730]: I0930 10:45:33.594249 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/tempest-tests-tempest" podStartSLOduration=4.126746164 podStartE2EDuration="14.594229943s" podCreationTimestamp="2025-09-30 10:45:19 +0000 UTC" firstStartedPulling="2025-09-30 10:45:21.079823007 +0000 UTC m=+3365.413083000" lastFinishedPulling="2025-09-30 10:45:31.547306786 +0000 UTC m=+3375.880566779" observedRunningTime="2025-09-30 10:45:32.589097595 +0000 UTC m=+3376.922357618" watchObservedRunningTime="2025-09-30 10:45:33.594229943 +0000 UTC m=+3377.927489936" Sep 30 10:45:34 crc kubenswrapper[4730]: I0930 10:45:34.579337 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5c6gr" 
event={"ID":"585c31ac-4577-4d75-8ac7-ab95ff1dcdd3","Type":"ContainerStarted","Data":"09235bdafeec7206460b3704cc7427701441b83ff15ca3cf33ed3b7ba0884628"} Sep 30 10:45:34 crc kubenswrapper[4730]: I0930 10:45:34.604228 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-5c6gr" podStartSLOduration=5.914064125 podStartE2EDuration="14.604209758s" podCreationTimestamp="2025-09-30 10:45:20 +0000 UTC" firstStartedPulling="2025-09-30 10:45:24.594198411 +0000 UTC m=+3368.927458414" lastFinishedPulling="2025-09-30 10:45:33.284344044 +0000 UTC m=+3377.617604047" observedRunningTime="2025-09-30 10:45:34.594057183 +0000 UTC m=+3378.927317166" watchObservedRunningTime="2025-09-30 10:45:34.604209758 +0000 UTC m=+3378.937469751" Sep 30 10:45:40 crc kubenswrapper[4730]: I0930 10:45:40.997090 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-5c6gr" Sep 30 10:45:40 crc kubenswrapper[4730]: I0930 10:45:40.997736 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-5c6gr" Sep 30 10:45:41 crc kubenswrapper[4730]: I0930 10:45:41.052262 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-5c6gr" Sep 30 10:45:41 crc kubenswrapper[4730]: I0930 10:45:41.700925 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-5c6gr" Sep 30 10:45:41 crc kubenswrapper[4730]: I0930 10:45:41.754300 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-5c6gr"] Sep 30 10:45:43 crc kubenswrapper[4730]: I0930 10:45:43.655332 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-5c6gr" podUID="585c31ac-4577-4d75-8ac7-ab95ff1dcdd3" containerName="registry-server" containerID="cri-o://09235bdafeec7206460b3704cc7427701441b83ff15ca3cf33ed3b7ba0884628" gracePeriod=2 Sep 30 10:45:44 crc kubenswrapper[4730]: I0930 10:45:44.179552 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-5c6gr" Sep 30 10:45:44 crc kubenswrapper[4730]: I0930 10:45:44.333422 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/585c31ac-4577-4d75-8ac7-ab95ff1dcdd3-catalog-content\") pod \"585c31ac-4577-4d75-8ac7-ab95ff1dcdd3\" (UID: \"585c31ac-4577-4d75-8ac7-ab95ff1dcdd3\") " Sep 30 10:45:44 crc kubenswrapper[4730]: I0930 10:45:44.333623 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wtx5r\" (UniqueName: \"kubernetes.io/projected/585c31ac-4577-4d75-8ac7-ab95ff1dcdd3-kube-api-access-wtx5r\") pod \"585c31ac-4577-4d75-8ac7-ab95ff1dcdd3\" (UID: \"585c31ac-4577-4d75-8ac7-ab95ff1dcdd3\") " Sep 30 10:45:44 crc kubenswrapper[4730]: I0930 10:45:44.333700 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/585c31ac-4577-4d75-8ac7-ab95ff1dcdd3-utilities\") pod \"585c31ac-4577-4d75-8ac7-ab95ff1dcdd3\" (UID: \"585c31ac-4577-4d75-8ac7-ab95ff1dcdd3\") " Sep 30 10:45:44 crc kubenswrapper[4730]: I0930 10:45:44.334529 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/585c31ac-4577-4d75-8ac7-ab95ff1dcdd3-utilities" (OuterVolumeSpecName: "utilities") pod "585c31ac-4577-4d75-8ac7-ab95ff1dcdd3" (UID: "585c31ac-4577-4d75-8ac7-ab95ff1dcdd3"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 10:45:44 crc kubenswrapper[4730]: I0930 10:45:44.339956 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/585c31ac-4577-4d75-8ac7-ab95ff1dcdd3-kube-api-access-wtx5r" (OuterVolumeSpecName: "kube-api-access-wtx5r") pod "585c31ac-4577-4d75-8ac7-ab95ff1dcdd3" (UID: "585c31ac-4577-4d75-8ac7-ab95ff1dcdd3"). InnerVolumeSpecName "kube-api-access-wtx5r". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:45:44 crc kubenswrapper[4730]: I0930 10:45:44.388433 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/585c31ac-4577-4d75-8ac7-ab95ff1dcdd3-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "585c31ac-4577-4d75-8ac7-ab95ff1dcdd3" (UID: "585c31ac-4577-4d75-8ac7-ab95ff1dcdd3"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 10:45:44 crc kubenswrapper[4730]: I0930 10:45:44.437412 4730 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/585c31ac-4577-4d75-8ac7-ab95ff1dcdd3-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 10:45:44 crc kubenswrapper[4730]: I0930 10:45:44.437441 4730 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/585c31ac-4577-4d75-8ac7-ab95ff1dcdd3-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 10:45:44 crc kubenswrapper[4730]: I0930 10:45:44.437456 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wtx5r\" (UniqueName: \"kubernetes.io/projected/585c31ac-4577-4d75-8ac7-ab95ff1dcdd3-kube-api-access-wtx5r\") on node \"crc\" DevicePath \"\"" Sep 30 10:45:44 crc kubenswrapper[4730]: I0930 10:45:44.666673 4730 generic.go:334] "Generic (PLEG): container finished" podID="585c31ac-4577-4d75-8ac7-ab95ff1dcdd3" containerID="09235bdafeec7206460b3704cc7427701441b83ff15ca3cf33ed3b7ba0884628" exitCode=0 Sep 30 10:45:44 crc kubenswrapper[4730]: I0930 10:45:44.666713 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5c6gr" event={"ID":"585c31ac-4577-4d75-8ac7-ab95ff1dcdd3","Type":"ContainerDied","Data":"09235bdafeec7206460b3704cc7427701441b83ff15ca3cf33ed3b7ba0884628"} Sep 30 10:45:44 crc kubenswrapper[4730]: I0930 10:45:44.666742 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5c6gr" event={"ID":"585c31ac-4577-4d75-8ac7-ab95ff1dcdd3","Type":"ContainerDied","Data":"01de41181be9810c382c553ae49539b4abc50a43948bb60d7395ef24fd46d6bc"} Sep 30 10:45:44 crc kubenswrapper[4730]: I0930 10:45:44.666762 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-5c6gr" Sep 30 10:45:44 crc kubenswrapper[4730]: I0930 10:45:44.666762 4730 scope.go:117] "RemoveContainer" containerID="09235bdafeec7206460b3704cc7427701441b83ff15ca3cf33ed3b7ba0884628" Sep 30 10:45:44 crc kubenswrapper[4730]: I0930 10:45:44.703423 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-5c6gr"] Sep 30 10:45:44 crc kubenswrapper[4730]: I0930 10:45:44.707043 4730 scope.go:117] "RemoveContainer" containerID="4754323e6ccb7e45f5b1647bc2fcd93fad32e784d43a129fe7a4fa3b95d1eb76" Sep 30 10:45:44 crc kubenswrapper[4730]: I0930 10:45:44.717883 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-5c6gr"] Sep 30 10:45:44 crc kubenswrapper[4730]: I0930 10:45:44.727934 4730 scope.go:117] "RemoveContainer" containerID="f9a42fda014d4fa99aca47e0b4c6c45b4adeec58a1ed22fd957a509af8bd36e0" Sep 30 10:45:44 crc kubenswrapper[4730]: I0930 10:45:44.784415 4730 scope.go:117] "RemoveContainer" containerID="09235bdafeec7206460b3704cc7427701441b83ff15ca3cf33ed3b7ba0884628" Sep 30 10:45:44 crc kubenswrapper[4730]: E0930 10:45:44.784947 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"09235bdafeec7206460b3704cc7427701441b83ff15ca3cf33ed3b7ba0884628\": container with ID starting with 09235bdafeec7206460b3704cc7427701441b83ff15ca3cf33ed3b7ba0884628 not found: ID does not exist" containerID="09235bdafeec7206460b3704cc7427701441b83ff15ca3cf33ed3b7ba0884628" Sep 30 10:45:44 crc kubenswrapper[4730]: I0930 10:45:44.784975 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"09235bdafeec7206460b3704cc7427701441b83ff15ca3cf33ed3b7ba0884628"} err="failed to get container status \"09235bdafeec7206460b3704cc7427701441b83ff15ca3cf33ed3b7ba0884628\": rpc error: code = NotFound desc = could not find container \"09235bdafeec7206460b3704cc7427701441b83ff15ca3cf33ed3b7ba0884628\": container with ID starting with 09235bdafeec7206460b3704cc7427701441b83ff15ca3cf33ed3b7ba0884628 not found: ID does not exist" Sep 30 10:45:44 crc kubenswrapper[4730]: I0930 10:45:44.784995 4730 scope.go:117] "RemoveContainer" containerID="4754323e6ccb7e45f5b1647bc2fcd93fad32e784d43a129fe7a4fa3b95d1eb76" Sep 30 10:45:44 crc kubenswrapper[4730]: E0930 10:45:44.785416 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4754323e6ccb7e45f5b1647bc2fcd93fad32e784d43a129fe7a4fa3b95d1eb76\": container with ID starting with 4754323e6ccb7e45f5b1647bc2fcd93fad32e784d43a129fe7a4fa3b95d1eb76 not found: ID does not exist" containerID="4754323e6ccb7e45f5b1647bc2fcd93fad32e784d43a129fe7a4fa3b95d1eb76" Sep 30 10:45:44 crc kubenswrapper[4730]: I0930 10:45:44.785450 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4754323e6ccb7e45f5b1647bc2fcd93fad32e784d43a129fe7a4fa3b95d1eb76"} err="failed to get container status \"4754323e6ccb7e45f5b1647bc2fcd93fad32e784d43a129fe7a4fa3b95d1eb76\": rpc error: code = NotFound desc = could not find container \"4754323e6ccb7e45f5b1647bc2fcd93fad32e784d43a129fe7a4fa3b95d1eb76\": container with ID starting with 4754323e6ccb7e45f5b1647bc2fcd93fad32e784d43a129fe7a4fa3b95d1eb76 not found: ID does not exist" Sep 30 10:45:44 crc kubenswrapper[4730]: I0930 10:45:44.785464 4730 scope.go:117] "RemoveContainer" 
containerID="f9a42fda014d4fa99aca47e0b4c6c45b4adeec58a1ed22fd957a509af8bd36e0" Sep 30 10:45:44 crc kubenswrapper[4730]: E0930 10:45:44.786216 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f9a42fda014d4fa99aca47e0b4c6c45b4adeec58a1ed22fd957a509af8bd36e0\": container with ID starting with f9a42fda014d4fa99aca47e0b4c6c45b4adeec58a1ed22fd957a509af8bd36e0 not found: ID does not exist" containerID="f9a42fda014d4fa99aca47e0b4c6c45b4adeec58a1ed22fd957a509af8bd36e0" Sep 30 10:45:44 crc kubenswrapper[4730]: I0930 10:45:44.786235 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f9a42fda014d4fa99aca47e0b4c6c45b4adeec58a1ed22fd957a509af8bd36e0"} err="failed to get container status \"f9a42fda014d4fa99aca47e0b4c6c45b4adeec58a1ed22fd957a509af8bd36e0\": rpc error: code = NotFound desc = could not find container \"f9a42fda014d4fa99aca47e0b4c6c45b4adeec58a1ed22fd957a509af8bd36e0\": container with ID starting with f9a42fda014d4fa99aca47e0b4c6c45b4adeec58a1ed22fd957a509af8bd36e0 not found: ID does not exist" Sep 30 10:45:46 crc kubenswrapper[4730]: I0930 10:45:46.392714 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="585c31ac-4577-4d75-8ac7-ab95ff1dcdd3" path="/var/lib/kubelet/pods/585c31ac-4577-4d75-8ac7-ab95ff1dcdd3/volumes" Sep 30 10:46:31 crc kubenswrapper[4730]: I0930 10:46:31.556654 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-gmmq9"] Sep 30 10:46:31 crc kubenswrapper[4730]: E0930 10:46:31.557631 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="585c31ac-4577-4d75-8ac7-ab95ff1dcdd3" containerName="extract-utilities" Sep 30 10:46:31 crc kubenswrapper[4730]: I0930 10:46:31.557649 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="585c31ac-4577-4d75-8ac7-ab95ff1dcdd3" containerName="extract-utilities" Sep 30 10:46:31 crc kubenswrapper[4730]: E0930 10:46:31.557688 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="585c31ac-4577-4d75-8ac7-ab95ff1dcdd3" containerName="extract-content" Sep 30 10:46:31 crc kubenswrapper[4730]: I0930 10:46:31.557696 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="585c31ac-4577-4d75-8ac7-ab95ff1dcdd3" containerName="extract-content" Sep 30 10:46:31 crc kubenswrapper[4730]: E0930 10:46:31.557724 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="585c31ac-4577-4d75-8ac7-ab95ff1dcdd3" containerName="registry-server" Sep 30 10:46:31 crc kubenswrapper[4730]: I0930 10:46:31.557732 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="585c31ac-4577-4d75-8ac7-ab95ff1dcdd3" containerName="registry-server" Sep 30 10:46:31 crc kubenswrapper[4730]: I0930 10:46:31.557940 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="585c31ac-4577-4d75-8ac7-ab95ff1dcdd3" containerName="registry-server" Sep 30 10:46:31 crc kubenswrapper[4730]: I0930 10:46:31.559523 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-gmmq9" Sep 30 10:46:31 crc kubenswrapper[4730]: I0930 10:46:31.577525 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-gmmq9"] Sep 30 10:46:31 crc kubenswrapper[4730]: I0930 10:46:31.707138 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j2jb5\" (UniqueName: \"kubernetes.io/projected/326a40ed-29cb-4969-9713-8ce5dfa690c9-kube-api-access-j2jb5\") pod \"certified-operators-gmmq9\" (UID: \"326a40ed-29cb-4969-9713-8ce5dfa690c9\") " pod="openshift-marketplace/certified-operators-gmmq9" Sep 30 10:46:31 crc kubenswrapper[4730]: I0930 10:46:31.707391 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/326a40ed-29cb-4969-9713-8ce5dfa690c9-catalog-content\") pod \"certified-operators-gmmq9\" (UID: \"326a40ed-29cb-4969-9713-8ce5dfa690c9\") " pod="openshift-marketplace/certified-operators-gmmq9" Sep 30 10:46:31 crc kubenswrapper[4730]: I0930 10:46:31.707646 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/326a40ed-29cb-4969-9713-8ce5dfa690c9-utilities\") pod \"certified-operators-gmmq9\" (UID: \"326a40ed-29cb-4969-9713-8ce5dfa690c9\") " pod="openshift-marketplace/certified-operators-gmmq9" Sep 30 10:46:31 crc kubenswrapper[4730]: I0930 10:46:31.809597 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j2jb5\" (UniqueName: \"kubernetes.io/projected/326a40ed-29cb-4969-9713-8ce5dfa690c9-kube-api-access-j2jb5\") pod \"certified-operators-gmmq9\" (UID: \"326a40ed-29cb-4969-9713-8ce5dfa690c9\") " pod="openshift-marketplace/certified-operators-gmmq9" Sep 30 10:46:31 crc kubenswrapper[4730]: I0930 10:46:31.809732 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/326a40ed-29cb-4969-9713-8ce5dfa690c9-catalog-content\") pod \"certified-operators-gmmq9\" (UID: \"326a40ed-29cb-4969-9713-8ce5dfa690c9\") " pod="openshift-marketplace/certified-operators-gmmq9" Sep 30 10:46:31 crc kubenswrapper[4730]: I0930 10:46:31.809848 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/326a40ed-29cb-4969-9713-8ce5dfa690c9-utilities\") pod \"certified-operators-gmmq9\" (UID: \"326a40ed-29cb-4969-9713-8ce5dfa690c9\") " pod="openshift-marketplace/certified-operators-gmmq9" Sep 30 10:46:31 crc kubenswrapper[4730]: I0930 10:46:31.810353 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/326a40ed-29cb-4969-9713-8ce5dfa690c9-catalog-content\") pod \"certified-operators-gmmq9\" (UID: \"326a40ed-29cb-4969-9713-8ce5dfa690c9\") " pod="openshift-marketplace/certified-operators-gmmq9" Sep 30 10:46:31 crc kubenswrapper[4730]: I0930 10:46:31.810361 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/326a40ed-29cb-4969-9713-8ce5dfa690c9-utilities\") pod \"certified-operators-gmmq9\" (UID: \"326a40ed-29cb-4969-9713-8ce5dfa690c9\") " pod="openshift-marketplace/certified-operators-gmmq9" Sep 30 10:46:31 crc kubenswrapper[4730]: I0930 10:46:31.838125 4730 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-j2jb5\" (UniqueName: \"kubernetes.io/projected/326a40ed-29cb-4969-9713-8ce5dfa690c9-kube-api-access-j2jb5\") pod \"certified-operators-gmmq9\" (UID: \"326a40ed-29cb-4969-9713-8ce5dfa690c9\") " pod="openshift-marketplace/certified-operators-gmmq9" Sep 30 10:46:31 crc kubenswrapper[4730]: I0930 10:46:31.889515 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-gmmq9" Sep 30 10:46:32 crc kubenswrapper[4730]: I0930 10:46:32.400183 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-gmmq9"] Sep 30 10:46:33 crc kubenswrapper[4730]: I0930 10:46:33.135159 4730 generic.go:334] "Generic (PLEG): container finished" podID="326a40ed-29cb-4969-9713-8ce5dfa690c9" containerID="b5d4d7ec134aaa86ce52a04b3a009a187622af868527461a1f100c22dfb23174" exitCode=0 Sep 30 10:46:33 crc kubenswrapper[4730]: I0930 10:46:33.135203 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gmmq9" event={"ID":"326a40ed-29cb-4969-9713-8ce5dfa690c9","Type":"ContainerDied","Data":"b5d4d7ec134aaa86ce52a04b3a009a187622af868527461a1f100c22dfb23174"} Sep 30 10:46:33 crc kubenswrapper[4730]: I0930 10:46:33.135379 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gmmq9" event={"ID":"326a40ed-29cb-4969-9713-8ce5dfa690c9","Type":"ContainerStarted","Data":"32c7584d5bf6dac55498b6878a8545a5274790e6eb7197c235286cd50ba1e130"} Sep 30 10:46:33 crc kubenswrapper[4730]: I0930 10:46:33.137636 4730 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 30 10:46:34 crc kubenswrapper[4730]: I0930 10:46:34.144787 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gmmq9" event={"ID":"326a40ed-29cb-4969-9713-8ce5dfa690c9","Type":"ContainerStarted","Data":"06098c59aa378e75c40f41cbf34624ab13fd12adac65c2fff0e013805cee28d5"} Sep 30 10:46:36 crc kubenswrapper[4730]: I0930 10:46:36.164876 4730 generic.go:334] "Generic (PLEG): container finished" podID="326a40ed-29cb-4969-9713-8ce5dfa690c9" containerID="06098c59aa378e75c40f41cbf34624ab13fd12adac65c2fff0e013805cee28d5" exitCode=0 Sep 30 10:46:36 crc kubenswrapper[4730]: I0930 10:46:36.164948 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gmmq9" event={"ID":"326a40ed-29cb-4969-9713-8ce5dfa690c9","Type":"ContainerDied","Data":"06098c59aa378e75c40f41cbf34624ab13fd12adac65c2fff0e013805cee28d5"} Sep 30 10:46:37 crc kubenswrapper[4730]: I0930 10:46:37.175147 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gmmq9" event={"ID":"326a40ed-29cb-4969-9713-8ce5dfa690c9","Type":"ContainerStarted","Data":"0595894f3f418f0bb1396eb8957a1b98858844923e1f58a3f998a0f08317f6f6"} Sep 30 10:46:37 crc kubenswrapper[4730]: I0930 10:46:37.201423 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-gmmq9" podStartSLOduration=2.686504697 podStartE2EDuration="6.201406685s" podCreationTimestamp="2025-09-30 10:46:31 +0000 UTC" firstStartedPulling="2025-09-30 10:46:33.137381874 +0000 UTC m=+3437.470641867" lastFinishedPulling="2025-09-30 10:46:36.652283862 +0000 UTC m=+3440.985543855" observedRunningTime="2025-09-30 10:46:37.197149564 +0000 UTC m=+3441.530409587" watchObservedRunningTime="2025-09-30 
10:46:37.201406685 +0000 UTC m=+3441.534666678" Sep 30 10:46:41 crc kubenswrapper[4730]: I0930 10:46:41.889955 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-gmmq9" Sep 30 10:46:41 crc kubenswrapper[4730]: I0930 10:46:41.890541 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-gmmq9" Sep 30 10:46:41 crc kubenswrapper[4730]: I0930 10:46:41.939246 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-gmmq9" Sep 30 10:46:42 crc kubenswrapper[4730]: I0930 10:46:42.292078 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-gmmq9" Sep 30 10:46:42 crc kubenswrapper[4730]: I0930 10:46:42.352084 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-gmmq9"] Sep 30 10:46:44 crc kubenswrapper[4730]: I0930 10:46:44.243090 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-gmmq9" podUID="326a40ed-29cb-4969-9713-8ce5dfa690c9" containerName="registry-server" containerID="cri-o://0595894f3f418f0bb1396eb8957a1b98858844923e1f58a3f998a0f08317f6f6" gracePeriod=2 Sep 30 10:46:44 crc kubenswrapper[4730]: I0930 10:46:44.757210 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-gmmq9" Sep 30 10:46:44 crc kubenswrapper[4730]: I0930 10:46:44.898554 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/326a40ed-29cb-4969-9713-8ce5dfa690c9-utilities\") pod \"326a40ed-29cb-4969-9713-8ce5dfa690c9\" (UID: \"326a40ed-29cb-4969-9713-8ce5dfa690c9\") " Sep 30 10:46:44 crc kubenswrapper[4730]: I0930 10:46:44.898604 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j2jb5\" (UniqueName: \"kubernetes.io/projected/326a40ed-29cb-4969-9713-8ce5dfa690c9-kube-api-access-j2jb5\") pod \"326a40ed-29cb-4969-9713-8ce5dfa690c9\" (UID: \"326a40ed-29cb-4969-9713-8ce5dfa690c9\") " Sep 30 10:46:44 crc kubenswrapper[4730]: I0930 10:46:44.898656 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/326a40ed-29cb-4969-9713-8ce5dfa690c9-catalog-content\") pod \"326a40ed-29cb-4969-9713-8ce5dfa690c9\" (UID: \"326a40ed-29cb-4969-9713-8ce5dfa690c9\") " Sep 30 10:46:44 crc kubenswrapper[4730]: I0930 10:46:44.899408 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/326a40ed-29cb-4969-9713-8ce5dfa690c9-utilities" (OuterVolumeSpecName: "utilities") pod "326a40ed-29cb-4969-9713-8ce5dfa690c9" (UID: "326a40ed-29cb-4969-9713-8ce5dfa690c9"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 10:46:44 crc kubenswrapper[4730]: I0930 10:46:44.905584 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/326a40ed-29cb-4969-9713-8ce5dfa690c9-kube-api-access-j2jb5" (OuterVolumeSpecName: "kube-api-access-j2jb5") pod "326a40ed-29cb-4969-9713-8ce5dfa690c9" (UID: "326a40ed-29cb-4969-9713-8ce5dfa690c9"). InnerVolumeSpecName "kube-api-access-j2jb5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:46:44 crc kubenswrapper[4730]: I0930 10:46:44.943838 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/326a40ed-29cb-4969-9713-8ce5dfa690c9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "326a40ed-29cb-4969-9713-8ce5dfa690c9" (UID: "326a40ed-29cb-4969-9713-8ce5dfa690c9"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 10:46:45 crc kubenswrapper[4730]: I0930 10:46:45.001186 4730 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/326a40ed-29cb-4969-9713-8ce5dfa690c9-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 10:46:45 crc kubenswrapper[4730]: I0930 10:46:45.001216 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j2jb5\" (UniqueName: \"kubernetes.io/projected/326a40ed-29cb-4969-9713-8ce5dfa690c9-kube-api-access-j2jb5\") on node \"crc\" DevicePath \"\"" Sep 30 10:46:45 crc kubenswrapper[4730]: I0930 10:46:45.001226 4730 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/326a40ed-29cb-4969-9713-8ce5dfa690c9-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 10:46:45 crc kubenswrapper[4730]: I0930 10:46:45.256096 4730 generic.go:334] "Generic (PLEG): container finished" podID="326a40ed-29cb-4969-9713-8ce5dfa690c9" containerID="0595894f3f418f0bb1396eb8957a1b98858844923e1f58a3f998a0f08317f6f6" exitCode=0 Sep 30 10:46:45 crc kubenswrapper[4730]: I0930 10:46:45.256157 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gmmq9" event={"ID":"326a40ed-29cb-4969-9713-8ce5dfa690c9","Type":"ContainerDied","Data":"0595894f3f418f0bb1396eb8957a1b98858844923e1f58a3f998a0f08317f6f6"} Sep 30 10:46:45 crc kubenswrapper[4730]: I0930 10:46:45.256170 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-gmmq9" Sep 30 10:46:45 crc kubenswrapper[4730]: I0930 10:46:45.256191 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gmmq9" event={"ID":"326a40ed-29cb-4969-9713-8ce5dfa690c9","Type":"ContainerDied","Data":"32c7584d5bf6dac55498b6878a8545a5274790e6eb7197c235286cd50ba1e130"} Sep 30 10:46:45 crc kubenswrapper[4730]: I0930 10:46:45.256214 4730 scope.go:117] "RemoveContainer" containerID="0595894f3f418f0bb1396eb8957a1b98858844923e1f58a3f998a0f08317f6f6" Sep 30 10:46:45 crc kubenswrapper[4730]: I0930 10:46:45.281746 4730 scope.go:117] "RemoveContainer" containerID="06098c59aa378e75c40f41cbf34624ab13fd12adac65c2fff0e013805cee28d5" Sep 30 10:46:45 crc kubenswrapper[4730]: I0930 10:46:45.303816 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-gmmq9"] Sep 30 10:46:45 crc kubenswrapper[4730]: I0930 10:46:45.312078 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-gmmq9"] Sep 30 10:46:45 crc kubenswrapper[4730]: I0930 10:46:45.318719 4730 scope.go:117] "RemoveContainer" containerID="b5d4d7ec134aaa86ce52a04b3a009a187622af868527461a1f100c22dfb23174" Sep 30 10:46:45 crc kubenswrapper[4730]: I0930 10:46:45.374492 4730 scope.go:117] "RemoveContainer" containerID="0595894f3f418f0bb1396eb8957a1b98858844923e1f58a3f998a0f08317f6f6" Sep 30 10:46:45 crc kubenswrapper[4730]: E0930 10:46:45.375310 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0595894f3f418f0bb1396eb8957a1b98858844923e1f58a3f998a0f08317f6f6\": container with ID starting with 0595894f3f418f0bb1396eb8957a1b98858844923e1f58a3f998a0f08317f6f6 not found: ID does not exist" containerID="0595894f3f418f0bb1396eb8957a1b98858844923e1f58a3f998a0f08317f6f6" Sep 30 10:46:45 crc kubenswrapper[4730]: I0930 10:46:45.375395 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0595894f3f418f0bb1396eb8957a1b98858844923e1f58a3f998a0f08317f6f6"} err="failed to get container status \"0595894f3f418f0bb1396eb8957a1b98858844923e1f58a3f998a0f08317f6f6\": rpc error: code = NotFound desc = could not find container \"0595894f3f418f0bb1396eb8957a1b98858844923e1f58a3f998a0f08317f6f6\": container with ID starting with 0595894f3f418f0bb1396eb8957a1b98858844923e1f58a3f998a0f08317f6f6 not found: ID does not exist" Sep 30 10:46:45 crc kubenswrapper[4730]: I0930 10:46:45.375448 4730 scope.go:117] "RemoveContainer" containerID="06098c59aa378e75c40f41cbf34624ab13fd12adac65c2fff0e013805cee28d5" Sep 30 10:46:45 crc kubenswrapper[4730]: E0930 10:46:45.376082 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"06098c59aa378e75c40f41cbf34624ab13fd12adac65c2fff0e013805cee28d5\": container with ID starting with 06098c59aa378e75c40f41cbf34624ab13fd12adac65c2fff0e013805cee28d5 not found: ID does not exist" containerID="06098c59aa378e75c40f41cbf34624ab13fd12adac65c2fff0e013805cee28d5" Sep 30 10:46:45 crc kubenswrapper[4730]: I0930 10:46:45.376121 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"06098c59aa378e75c40f41cbf34624ab13fd12adac65c2fff0e013805cee28d5"} err="failed to get container status \"06098c59aa378e75c40f41cbf34624ab13fd12adac65c2fff0e013805cee28d5\": rpc error: code = NotFound desc = could not find 
container \"06098c59aa378e75c40f41cbf34624ab13fd12adac65c2fff0e013805cee28d5\": container with ID starting with 06098c59aa378e75c40f41cbf34624ab13fd12adac65c2fff0e013805cee28d5 not found: ID does not exist" Sep 30 10:46:45 crc kubenswrapper[4730]: I0930 10:46:45.376150 4730 scope.go:117] "RemoveContainer" containerID="b5d4d7ec134aaa86ce52a04b3a009a187622af868527461a1f100c22dfb23174" Sep 30 10:46:45 crc kubenswrapper[4730]: E0930 10:46:45.376720 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b5d4d7ec134aaa86ce52a04b3a009a187622af868527461a1f100c22dfb23174\": container with ID starting with b5d4d7ec134aaa86ce52a04b3a009a187622af868527461a1f100c22dfb23174 not found: ID does not exist" containerID="b5d4d7ec134aaa86ce52a04b3a009a187622af868527461a1f100c22dfb23174" Sep 30 10:46:45 crc kubenswrapper[4730]: I0930 10:46:45.376784 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b5d4d7ec134aaa86ce52a04b3a009a187622af868527461a1f100c22dfb23174"} err="failed to get container status \"b5d4d7ec134aaa86ce52a04b3a009a187622af868527461a1f100c22dfb23174\": rpc error: code = NotFound desc = could not find container \"b5d4d7ec134aaa86ce52a04b3a009a187622af868527461a1f100c22dfb23174\": container with ID starting with b5d4d7ec134aaa86ce52a04b3a009a187622af868527461a1f100c22dfb23174 not found: ID does not exist" Sep 30 10:46:46 crc kubenswrapper[4730]: I0930 10:46:46.410369 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="326a40ed-29cb-4969-9713-8ce5dfa690c9" path="/var/lib/kubelet/pods/326a40ed-29cb-4969-9713-8ce5dfa690c9/volumes" Sep 30 10:47:32 crc kubenswrapper[4730]: I0930 10:47:32.336692 4730 patch_prober.go:28] interesting pod/machine-config-daemon-d4zf9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 10:47:32 crc kubenswrapper[4730]: I0930 10:47:32.337240 4730 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 10:47:42 crc kubenswrapper[4730]: I0930 10:47:42.193531 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-9j8tq"] Sep 30 10:47:42 crc kubenswrapper[4730]: E0930 10:47:42.194431 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="326a40ed-29cb-4969-9713-8ce5dfa690c9" containerName="extract-content" Sep 30 10:47:42 crc kubenswrapper[4730]: I0930 10:47:42.194445 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="326a40ed-29cb-4969-9713-8ce5dfa690c9" containerName="extract-content" Sep 30 10:47:42 crc kubenswrapper[4730]: E0930 10:47:42.194460 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="326a40ed-29cb-4969-9713-8ce5dfa690c9" containerName="registry-server" Sep 30 10:47:42 crc kubenswrapper[4730]: I0930 10:47:42.194466 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="326a40ed-29cb-4969-9713-8ce5dfa690c9" containerName="registry-server" Sep 30 10:47:42 crc kubenswrapper[4730]: E0930 10:47:42.194495 4730 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="326a40ed-29cb-4969-9713-8ce5dfa690c9" containerName="extract-utilities" Sep 30 10:47:42 crc kubenswrapper[4730]: I0930 10:47:42.194502 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="326a40ed-29cb-4969-9713-8ce5dfa690c9" containerName="extract-utilities" Sep 30 10:47:42 crc kubenswrapper[4730]: I0930 10:47:42.194739 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="326a40ed-29cb-4969-9713-8ce5dfa690c9" containerName="registry-server" Sep 30 10:47:42 crc kubenswrapper[4730]: I0930 10:47:42.196225 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-9j8tq" Sep 30 10:47:42 crc kubenswrapper[4730]: I0930 10:47:42.217139 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-9j8tq"] Sep 30 10:47:42 crc kubenswrapper[4730]: I0930 10:47:42.361815 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8b6e25c9-31ec-4d10-a1a1-8310cd185e1d-utilities\") pod \"redhat-marketplace-9j8tq\" (UID: \"8b6e25c9-31ec-4d10-a1a1-8310cd185e1d\") " pod="openshift-marketplace/redhat-marketplace-9j8tq" Sep 30 10:47:42 crc kubenswrapper[4730]: I0930 10:47:42.361854 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p4qj9\" (UniqueName: \"kubernetes.io/projected/8b6e25c9-31ec-4d10-a1a1-8310cd185e1d-kube-api-access-p4qj9\") pod \"redhat-marketplace-9j8tq\" (UID: \"8b6e25c9-31ec-4d10-a1a1-8310cd185e1d\") " pod="openshift-marketplace/redhat-marketplace-9j8tq" Sep 30 10:47:42 crc kubenswrapper[4730]: I0930 10:47:42.361886 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8b6e25c9-31ec-4d10-a1a1-8310cd185e1d-catalog-content\") pod \"redhat-marketplace-9j8tq\" (UID: \"8b6e25c9-31ec-4d10-a1a1-8310cd185e1d\") " pod="openshift-marketplace/redhat-marketplace-9j8tq" Sep 30 10:47:42 crc kubenswrapper[4730]: I0930 10:47:42.463753 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8b6e25c9-31ec-4d10-a1a1-8310cd185e1d-catalog-content\") pod \"redhat-marketplace-9j8tq\" (UID: \"8b6e25c9-31ec-4d10-a1a1-8310cd185e1d\") " pod="openshift-marketplace/redhat-marketplace-9j8tq" Sep 30 10:47:42 crc kubenswrapper[4730]: I0930 10:47:42.463992 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8b6e25c9-31ec-4d10-a1a1-8310cd185e1d-utilities\") pod \"redhat-marketplace-9j8tq\" (UID: \"8b6e25c9-31ec-4d10-a1a1-8310cd185e1d\") " pod="openshift-marketplace/redhat-marketplace-9j8tq" Sep 30 10:47:42 crc kubenswrapper[4730]: I0930 10:47:42.464011 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p4qj9\" (UniqueName: \"kubernetes.io/projected/8b6e25c9-31ec-4d10-a1a1-8310cd185e1d-kube-api-access-p4qj9\") pod \"redhat-marketplace-9j8tq\" (UID: \"8b6e25c9-31ec-4d10-a1a1-8310cd185e1d\") " pod="openshift-marketplace/redhat-marketplace-9j8tq" Sep 30 10:47:42 crc kubenswrapper[4730]: I0930 10:47:42.464248 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8b6e25c9-31ec-4d10-a1a1-8310cd185e1d-catalog-content\") pod \"redhat-marketplace-9j8tq\" 
(UID: \"8b6e25c9-31ec-4d10-a1a1-8310cd185e1d\") " pod="openshift-marketplace/redhat-marketplace-9j8tq" Sep 30 10:47:42 crc kubenswrapper[4730]: I0930 10:47:42.464494 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8b6e25c9-31ec-4d10-a1a1-8310cd185e1d-utilities\") pod \"redhat-marketplace-9j8tq\" (UID: \"8b6e25c9-31ec-4d10-a1a1-8310cd185e1d\") " pod="openshift-marketplace/redhat-marketplace-9j8tq" Sep 30 10:47:42 crc kubenswrapper[4730]: I0930 10:47:42.485590 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p4qj9\" (UniqueName: \"kubernetes.io/projected/8b6e25c9-31ec-4d10-a1a1-8310cd185e1d-kube-api-access-p4qj9\") pod \"redhat-marketplace-9j8tq\" (UID: \"8b6e25c9-31ec-4d10-a1a1-8310cd185e1d\") " pod="openshift-marketplace/redhat-marketplace-9j8tq" Sep 30 10:47:42 crc kubenswrapper[4730]: I0930 10:47:42.515599 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-9j8tq" Sep 30 10:47:43 crc kubenswrapper[4730]: I0930 10:47:43.006075 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-9j8tq"] Sep 30 10:47:43 crc kubenswrapper[4730]: I0930 10:47:43.837945 4730 generic.go:334] "Generic (PLEG): container finished" podID="8b6e25c9-31ec-4d10-a1a1-8310cd185e1d" containerID="0633214da969f876207f028f615344a6bc4decd79ea89d3882055edcb3f2d3a2" exitCode=0 Sep 30 10:47:43 crc kubenswrapper[4730]: I0930 10:47:43.838003 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9j8tq" event={"ID":"8b6e25c9-31ec-4d10-a1a1-8310cd185e1d","Type":"ContainerDied","Data":"0633214da969f876207f028f615344a6bc4decd79ea89d3882055edcb3f2d3a2"} Sep 30 10:47:43 crc kubenswrapper[4730]: I0930 10:47:43.838302 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9j8tq" event={"ID":"8b6e25c9-31ec-4d10-a1a1-8310cd185e1d","Type":"ContainerStarted","Data":"ec92546838ff31cf943985e24838b7601218b4c40eba46aab02174694ddf6275"} Sep 30 10:47:44 crc kubenswrapper[4730]: I0930 10:47:44.848405 4730 generic.go:334] "Generic (PLEG): container finished" podID="8b6e25c9-31ec-4d10-a1a1-8310cd185e1d" containerID="dbd40944d875e58bce8489795c0de142967e84e9083c57bbf5143aeea4bdc245" exitCode=0 Sep 30 10:47:44 crc kubenswrapper[4730]: I0930 10:47:44.848646 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9j8tq" event={"ID":"8b6e25c9-31ec-4d10-a1a1-8310cd185e1d","Type":"ContainerDied","Data":"dbd40944d875e58bce8489795c0de142967e84e9083c57bbf5143aeea4bdc245"} Sep 30 10:47:45 crc kubenswrapper[4730]: I0930 10:47:45.860419 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9j8tq" event={"ID":"8b6e25c9-31ec-4d10-a1a1-8310cd185e1d","Type":"ContainerStarted","Data":"52a9e8afdee07c3390aef8b8e098ecafa5ca6d3b130ca1a3645d621cce2ddccb"} Sep 30 10:47:45 crc kubenswrapper[4730]: I0930 10:47:45.884943 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-9j8tq" podStartSLOduration=2.084540956 podStartE2EDuration="3.88492646s" podCreationTimestamp="2025-09-30 10:47:42 +0000 UTC" firstStartedPulling="2025-09-30 10:47:43.841799501 +0000 UTC m=+3508.175059494" lastFinishedPulling="2025-09-30 10:47:45.642185005 +0000 UTC m=+3509.975444998" observedRunningTime="2025-09-30 
10:47:45.881873411 +0000 UTC m=+3510.215133404" watchObservedRunningTime="2025-09-30 10:47:45.88492646 +0000 UTC m=+3510.218186453" Sep 30 10:47:52 crc kubenswrapper[4730]: I0930 10:47:52.516814 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-9j8tq" Sep 30 10:47:52 crc kubenswrapper[4730]: I0930 10:47:52.517317 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-9j8tq" Sep 30 10:47:52 crc kubenswrapper[4730]: I0930 10:47:52.587252 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-9j8tq" Sep 30 10:47:53 crc kubenswrapper[4730]: I0930 10:47:53.013019 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-9j8tq" Sep 30 10:47:53 crc kubenswrapper[4730]: I0930 10:47:53.077963 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-9j8tq"] Sep 30 10:47:54 crc kubenswrapper[4730]: I0930 10:47:54.956662 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-9j8tq" podUID="8b6e25c9-31ec-4d10-a1a1-8310cd185e1d" containerName="registry-server" containerID="cri-o://52a9e8afdee07c3390aef8b8e098ecafa5ca6d3b130ca1a3645d621cce2ddccb" gracePeriod=2 Sep 30 10:47:55 crc kubenswrapper[4730]: I0930 10:47:55.474665 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-9j8tq" Sep 30 10:47:55 crc kubenswrapper[4730]: I0930 10:47:55.641639 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p4qj9\" (UniqueName: \"kubernetes.io/projected/8b6e25c9-31ec-4d10-a1a1-8310cd185e1d-kube-api-access-p4qj9\") pod \"8b6e25c9-31ec-4d10-a1a1-8310cd185e1d\" (UID: \"8b6e25c9-31ec-4d10-a1a1-8310cd185e1d\") " Sep 30 10:47:55 crc kubenswrapper[4730]: I0930 10:47:55.641978 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8b6e25c9-31ec-4d10-a1a1-8310cd185e1d-catalog-content\") pod \"8b6e25c9-31ec-4d10-a1a1-8310cd185e1d\" (UID: \"8b6e25c9-31ec-4d10-a1a1-8310cd185e1d\") " Sep 30 10:47:55 crc kubenswrapper[4730]: I0930 10:47:55.642184 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8b6e25c9-31ec-4d10-a1a1-8310cd185e1d-utilities\") pod \"8b6e25c9-31ec-4d10-a1a1-8310cd185e1d\" (UID: \"8b6e25c9-31ec-4d10-a1a1-8310cd185e1d\") " Sep 30 10:47:55 crc kubenswrapper[4730]: I0930 10:47:55.643251 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8b6e25c9-31ec-4d10-a1a1-8310cd185e1d-utilities" (OuterVolumeSpecName: "utilities") pod "8b6e25c9-31ec-4d10-a1a1-8310cd185e1d" (UID: "8b6e25c9-31ec-4d10-a1a1-8310cd185e1d"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 10:47:55 crc kubenswrapper[4730]: I0930 10:47:55.643843 4730 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8b6e25c9-31ec-4d10-a1a1-8310cd185e1d-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 10:47:55 crc kubenswrapper[4730]: I0930 10:47:55.652864 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8b6e25c9-31ec-4d10-a1a1-8310cd185e1d-kube-api-access-p4qj9" (OuterVolumeSpecName: "kube-api-access-p4qj9") pod "8b6e25c9-31ec-4d10-a1a1-8310cd185e1d" (UID: "8b6e25c9-31ec-4d10-a1a1-8310cd185e1d"). InnerVolumeSpecName "kube-api-access-p4qj9". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:47:55 crc kubenswrapper[4730]: I0930 10:47:55.660599 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8b6e25c9-31ec-4d10-a1a1-8310cd185e1d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8b6e25c9-31ec-4d10-a1a1-8310cd185e1d" (UID: "8b6e25c9-31ec-4d10-a1a1-8310cd185e1d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 10:47:55 crc kubenswrapper[4730]: I0930 10:47:55.746685 4730 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8b6e25c9-31ec-4d10-a1a1-8310cd185e1d-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 10:47:55 crc kubenswrapper[4730]: I0930 10:47:55.746749 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p4qj9\" (UniqueName: \"kubernetes.io/projected/8b6e25c9-31ec-4d10-a1a1-8310cd185e1d-kube-api-access-p4qj9\") on node \"crc\" DevicePath \"\"" Sep 30 10:47:55 crc kubenswrapper[4730]: I0930 10:47:55.975231 4730 generic.go:334] "Generic (PLEG): container finished" podID="8b6e25c9-31ec-4d10-a1a1-8310cd185e1d" containerID="52a9e8afdee07c3390aef8b8e098ecafa5ca6d3b130ca1a3645d621cce2ddccb" exitCode=0 Sep 30 10:47:55 crc kubenswrapper[4730]: I0930 10:47:55.975314 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9j8tq" event={"ID":"8b6e25c9-31ec-4d10-a1a1-8310cd185e1d","Type":"ContainerDied","Data":"52a9e8afdee07c3390aef8b8e098ecafa5ca6d3b130ca1a3645d621cce2ddccb"} Sep 30 10:47:55 crc kubenswrapper[4730]: I0930 10:47:55.975343 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-9j8tq" Sep 30 10:47:55 crc kubenswrapper[4730]: I0930 10:47:55.975385 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9j8tq" event={"ID":"8b6e25c9-31ec-4d10-a1a1-8310cd185e1d","Type":"ContainerDied","Data":"ec92546838ff31cf943985e24838b7601218b4c40eba46aab02174694ddf6275"} Sep 30 10:47:55 crc kubenswrapper[4730]: I0930 10:47:55.975430 4730 scope.go:117] "RemoveContainer" containerID="52a9e8afdee07c3390aef8b8e098ecafa5ca6d3b130ca1a3645d621cce2ddccb" Sep 30 10:47:56 crc kubenswrapper[4730]: I0930 10:47:56.012490 4730 scope.go:117] "RemoveContainer" containerID="dbd40944d875e58bce8489795c0de142967e84e9083c57bbf5143aeea4bdc245" Sep 30 10:47:56 crc kubenswrapper[4730]: I0930 10:47:56.035423 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-9j8tq"] Sep 30 10:47:56 crc kubenswrapper[4730]: I0930 10:47:56.051835 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-9j8tq"] Sep 30 10:47:56 crc kubenswrapper[4730]: I0930 10:47:56.052285 4730 scope.go:117] "RemoveContainer" containerID="0633214da969f876207f028f615344a6bc4decd79ea89d3882055edcb3f2d3a2" Sep 30 10:47:56 crc kubenswrapper[4730]: I0930 10:47:56.102915 4730 scope.go:117] "RemoveContainer" containerID="52a9e8afdee07c3390aef8b8e098ecafa5ca6d3b130ca1a3645d621cce2ddccb" Sep 30 10:47:56 crc kubenswrapper[4730]: E0930 10:47:56.103394 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"52a9e8afdee07c3390aef8b8e098ecafa5ca6d3b130ca1a3645d621cce2ddccb\": container with ID starting with 52a9e8afdee07c3390aef8b8e098ecafa5ca6d3b130ca1a3645d621cce2ddccb not found: ID does not exist" containerID="52a9e8afdee07c3390aef8b8e098ecafa5ca6d3b130ca1a3645d621cce2ddccb" Sep 30 10:47:56 crc kubenswrapper[4730]: I0930 10:47:56.103439 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"52a9e8afdee07c3390aef8b8e098ecafa5ca6d3b130ca1a3645d621cce2ddccb"} err="failed to get container status \"52a9e8afdee07c3390aef8b8e098ecafa5ca6d3b130ca1a3645d621cce2ddccb\": rpc error: code = NotFound desc = could not find container \"52a9e8afdee07c3390aef8b8e098ecafa5ca6d3b130ca1a3645d621cce2ddccb\": container with ID starting with 52a9e8afdee07c3390aef8b8e098ecafa5ca6d3b130ca1a3645d621cce2ddccb not found: ID does not exist" Sep 30 10:47:56 crc kubenswrapper[4730]: I0930 10:47:56.103466 4730 scope.go:117] "RemoveContainer" containerID="dbd40944d875e58bce8489795c0de142967e84e9083c57bbf5143aeea4bdc245" Sep 30 10:47:56 crc kubenswrapper[4730]: E0930 10:47:56.103931 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dbd40944d875e58bce8489795c0de142967e84e9083c57bbf5143aeea4bdc245\": container with ID starting with dbd40944d875e58bce8489795c0de142967e84e9083c57bbf5143aeea4bdc245 not found: ID does not exist" containerID="dbd40944d875e58bce8489795c0de142967e84e9083c57bbf5143aeea4bdc245" Sep 30 10:47:56 crc kubenswrapper[4730]: I0930 10:47:56.103954 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dbd40944d875e58bce8489795c0de142967e84e9083c57bbf5143aeea4bdc245"} err="failed to get container status \"dbd40944d875e58bce8489795c0de142967e84e9083c57bbf5143aeea4bdc245\": rpc error: code = NotFound desc = could not find 
container \"dbd40944d875e58bce8489795c0de142967e84e9083c57bbf5143aeea4bdc245\": container with ID starting with dbd40944d875e58bce8489795c0de142967e84e9083c57bbf5143aeea4bdc245 not found: ID does not exist" Sep 30 10:47:56 crc kubenswrapper[4730]: I0930 10:47:56.103967 4730 scope.go:117] "RemoveContainer" containerID="0633214da969f876207f028f615344a6bc4decd79ea89d3882055edcb3f2d3a2" Sep 30 10:47:56 crc kubenswrapper[4730]: E0930 10:47:56.104378 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0633214da969f876207f028f615344a6bc4decd79ea89d3882055edcb3f2d3a2\": container with ID starting with 0633214da969f876207f028f615344a6bc4decd79ea89d3882055edcb3f2d3a2 not found: ID does not exist" containerID="0633214da969f876207f028f615344a6bc4decd79ea89d3882055edcb3f2d3a2" Sep 30 10:47:56 crc kubenswrapper[4730]: I0930 10:47:56.104420 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0633214da969f876207f028f615344a6bc4decd79ea89d3882055edcb3f2d3a2"} err="failed to get container status \"0633214da969f876207f028f615344a6bc4decd79ea89d3882055edcb3f2d3a2\": rpc error: code = NotFound desc = could not find container \"0633214da969f876207f028f615344a6bc4decd79ea89d3882055edcb3f2d3a2\": container with ID starting with 0633214da969f876207f028f615344a6bc4decd79ea89d3882055edcb3f2d3a2 not found: ID does not exist" Sep 30 10:47:56 crc kubenswrapper[4730]: I0930 10:47:56.410434 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8b6e25c9-31ec-4d10-a1a1-8310cd185e1d" path="/var/lib/kubelet/pods/8b6e25c9-31ec-4d10-a1a1-8310cd185e1d/volumes" Sep 30 10:48:02 crc kubenswrapper[4730]: I0930 10:48:02.336864 4730 patch_prober.go:28] interesting pod/machine-config-daemon-d4zf9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 10:48:02 crc kubenswrapper[4730]: I0930 10:48:02.337163 4730 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 10:48:32 crc kubenswrapper[4730]: I0930 10:48:32.337102 4730 patch_prober.go:28] interesting pod/machine-config-daemon-d4zf9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 10:48:32 crc kubenswrapper[4730]: I0930 10:48:32.337662 4730 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 10:48:32 crc kubenswrapper[4730]: I0930 10:48:32.337735 4730 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" Sep 30 10:48:32 crc kubenswrapper[4730]: I0930 10:48:32.338810 4730 kuberuntime_manager.go:1027] "Message for Container of pod" 
containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"cec435c0d3415ad30c7bde79883876150a78e02da8967aef01668d3b6363275b"} pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 10:48:32 crc kubenswrapper[4730]: I0930 10:48:32.338899 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerName="machine-config-daemon" containerID="cri-o://cec435c0d3415ad30c7bde79883876150a78e02da8967aef01668d3b6363275b" gracePeriod=600 Sep 30 10:48:32 crc kubenswrapper[4730]: E0930 10:48:32.484988 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 10:48:33 crc kubenswrapper[4730]: I0930 10:48:33.386787 4730 generic.go:334] "Generic (PLEG): container finished" podID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerID="cec435c0d3415ad30c7bde79883876150a78e02da8967aef01668d3b6363275b" exitCode=0 Sep 30 10:48:33 crc kubenswrapper[4730]: I0930 10:48:33.387165 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" event={"ID":"95bd4436-8399-478d-9552-c9ba5ae8f327","Type":"ContainerDied","Data":"cec435c0d3415ad30c7bde79883876150a78e02da8967aef01668d3b6363275b"} Sep 30 10:48:33 crc kubenswrapper[4730]: I0930 10:48:33.387227 4730 scope.go:117] "RemoveContainer" containerID="356c135f70f8fbd71ae905359591919213eb62e111b8492c4fab1464016900ca" Sep 30 10:48:33 crc kubenswrapper[4730]: I0930 10:48:33.387933 4730 scope.go:117] "RemoveContainer" containerID="cec435c0d3415ad30c7bde79883876150a78e02da8967aef01668d3b6363275b" Sep 30 10:48:33 crc kubenswrapper[4730]: E0930 10:48:33.388429 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 10:48:47 crc kubenswrapper[4730]: I0930 10:48:47.380965 4730 scope.go:117] "RemoveContainer" containerID="cec435c0d3415ad30c7bde79883876150a78e02da8967aef01668d3b6363275b" Sep 30 10:48:47 crc kubenswrapper[4730]: E0930 10:48:47.381804 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 10:48:59 crc kubenswrapper[4730]: I0930 10:48:59.381463 4730 scope.go:117] "RemoveContainer" containerID="cec435c0d3415ad30c7bde79883876150a78e02da8967aef01668d3b6363275b" Sep 30 10:48:59 crc kubenswrapper[4730]: E0930 10:48:59.382904 4730 
pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 10:49:13 crc kubenswrapper[4730]: I0930 10:49:13.381046 4730 scope.go:117] "RemoveContainer" containerID="cec435c0d3415ad30c7bde79883876150a78e02da8967aef01668d3b6363275b" Sep 30 10:49:13 crc kubenswrapper[4730]: E0930 10:49:13.381883 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 10:49:26 crc kubenswrapper[4730]: I0930 10:49:26.389732 4730 scope.go:117] "RemoveContainer" containerID="cec435c0d3415ad30c7bde79883876150a78e02da8967aef01668d3b6363275b" Sep 30 10:49:26 crc kubenswrapper[4730]: E0930 10:49:26.390531 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 10:49:39 crc kubenswrapper[4730]: I0930 10:49:39.381351 4730 scope.go:117] "RemoveContainer" containerID="cec435c0d3415ad30c7bde79883876150a78e02da8967aef01668d3b6363275b" Sep 30 10:49:39 crc kubenswrapper[4730]: E0930 10:49:39.382153 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 10:49:53 crc kubenswrapper[4730]: I0930 10:49:53.380706 4730 scope.go:117] "RemoveContainer" containerID="cec435c0d3415ad30c7bde79883876150a78e02da8967aef01668d3b6363275b" Sep 30 10:49:53 crc kubenswrapper[4730]: E0930 10:49:53.381568 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 10:50:08 crc kubenswrapper[4730]: I0930 10:50:08.381484 4730 scope.go:117] "RemoveContainer" containerID="cec435c0d3415ad30c7bde79883876150a78e02da8967aef01668d3b6363275b" Sep 30 10:50:08 crc kubenswrapper[4730]: E0930 10:50:08.382433 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 10:50:19 crc kubenswrapper[4730]: I0930 10:50:19.381565 4730 scope.go:117] "RemoveContainer" containerID="cec435c0d3415ad30c7bde79883876150a78e02da8967aef01668d3b6363275b" Sep 30 10:50:19 crc kubenswrapper[4730]: E0930 10:50:19.385587 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 10:50:33 crc kubenswrapper[4730]: I0930 10:50:33.381036 4730 scope.go:117] "RemoveContainer" containerID="cec435c0d3415ad30c7bde79883876150a78e02da8967aef01668d3b6363275b" Sep 30 10:50:33 crc kubenswrapper[4730]: E0930 10:50:33.381933 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 10:50:45 crc kubenswrapper[4730]: I0930 10:50:45.381057 4730 scope.go:117] "RemoveContainer" containerID="cec435c0d3415ad30c7bde79883876150a78e02da8967aef01668d3b6363275b" Sep 30 10:50:45 crc kubenswrapper[4730]: E0930 10:50:45.383829 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 10:51:00 crc kubenswrapper[4730]: I0930 10:51:00.380989 4730 scope.go:117] "RemoveContainer" containerID="cec435c0d3415ad30c7bde79883876150a78e02da8967aef01668d3b6363275b" Sep 30 10:51:00 crc kubenswrapper[4730]: E0930 10:51:00.382198 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 10:51:11 crc kubenswrapper[4730]: I0930 10:51:11.382086 4730 scope.go:117] "RemoveContainer" containerID="cec435c0d3415ad30c7bde79883876150a78e02da8967aef01668d3b6363275b" Sep 30 10:51:11 crc kubenswrapper[4730]: E0930 10:51:11.383100 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 10:51:22 crc kubenswrapper[4730]: I0930 10:51:22.381459 4730 scope.go:117] "RemoveContainer" containerID="cec435c0d3415ad30c7bde79883876150a78e02da8967aef01668d3b6363275b" Sep 30 10:51:22 crc kubenswrapper[4730]: E0930 10:51:22.382143 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 10:51:33 crc kubenswrapper[4730]: I0930 10:51:33.381290 4730 scope.go:117] "RemoveContainer" containerID="cec435c0d3415ad30c7bde79883876150a78e02da8967aef01668d3b6363275b" Sep 30 10:51:33 crc kubenswrapper[4730]: E0930 10:51:33.382169 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 10:51:44 crc kubenswrapper[4730]: I0930 10:51:44.381337 4730 scope.go:117] "RemoveContainer" containerID="cec435c0d3415ad30c7bde79883876150a78e02da8967aef01668d3b6363275b" Sep 30 10:51:44 crc kubenswrapper[4730]: E0930 10:51:44.382128 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 10:51:49 crc kubenswrapper[4730]: I0930 10:51:49.076123 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-bfgsk"] Sep 30 10:51:49 crc kubenswrapper[4730]: E0930 10:51:49.077147 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8b6e25c9-31ec-4d10-a1a1-8310cd185e1d" containerName="extract-content" Sep 30 10:51:49 crc kubenswrapper[4730]: I0930 10:51:49.077169 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="8b6e25c9-31ec-4d10-a1a1-8310cd185e1d" containerName="extract-content" Sep 30 10:51:49 crc kubenswrapper[4730]: E0930 10:51:49.077201 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8b6e25c9-31ec-4d10-a1a1-8310cd185e1d" containerName="extract-utilities" Sep 30 10:51:49 crc kubenswrapper[4730]: I0930 10:51:49.077211 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="8b6e25c9-31ec-4d10-a1a1-8310cd185e1d" containerName="extract-utilities" Sep 30 10:51:49 crc kubenswrapper[4730]: E0930 10:51:49.077245 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8b6e25c9-31ec-4d10-a1a1-8310cd185e1d" containerName="registry-server" Sep 30 10:51:49 crc kubenswrapper[4730]: I0930 10:51:49.077253 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="8b6e25c9-31ec-4d10-a1a1-8310cd185e1d" containerName="registry-server" Sep 30 10:51:49 crc kubenswrapper[4730]: I0930 10:51:49.077468 
4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="8b6e25c9-31ec-4d10-a1a1-8310cd185e1d" containerName="registry-server" Sep 30 10:51:49 crc kubenswrapper[4730]: I0930 10:51:49.079007 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-bfgsk" Sep 30 10:51:49 crc kubenswrapper[4730]: I0930 10:51:49.104199 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-bfgsk"] Sep 30 10:51:49 crc kubenswrapper[4730]: I0930 10:51:49.155819 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b5fd03da-4221-435b-93b4-3030556fb357-utilities\") pod \"redhat-operators-bfgsk\" (UID: \"b5fd03da-4221-435b-93b4-3030556fb357\") " pod="openshift-marketplace/redhat-operators-bfgsk" Sep 30 10:51:49 crc kubenswrapper[4730]: I0930 10:51:49.155876 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b5fd03da-4221-435b-93b4-3030556fb357-catalog-content\") pod \"redhat-operators-bfgsk\" (UID: \"b5fd03da-4221-435b-93b4-3030556fb357\") " pod="openshift-marketplace/redhat-operators-bfgsk" Sep 30 10:51:49 crc kubenswrapper[4730]: I0930 10:51:49.155933 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2tl7k\" (UniqueName: \"kubernetes.io/projected/b5fd03da-4221-435b-93b4-3030556fb357-kube-api-access-2tl7k\") pod \"redhat-operators-bfgsk\" (UID: \"b5fd03da-4221-435b-93b4-3030556fb357\") " pod="openshift-marketplace/redhat-operators-bfgsk" Sep 30 10:51:49 crc kubenswrapper[4730]: I0930 10:51:49.258034 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2tl7k\" (UniqueName: \"kubernetes.io/projected/b5fd03da-4221-435b-93b4-3030556fb357-kube-api-access-2tl7k\") pod \"redhat-operators-bfgsk\" (UID: \"b5fd03da-4221-435b-93b4-3030556fb357\") " pod="openshift-marketplace/redhat-operators-bfgsk" Sep 30 10:51:49 crc kubenswrapper[4730]: I0930 10:51:49.258267 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b5fd03da-4221-435b-93b4-3030556fb357-utilities\") pod \"redhat-operators-bfgsk\" (UID: \"b5fd03da-4221-435b-93b4-3030556fb357\") " pod="openshift-marketplace/redhat-operators-bfgsk" Sep 30 10:51:49 crc kubenswrapper[4730]: I0930 10:51:49.258331 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b5fd03da-4221-435b-93b4-3030556fb357-catalog-content\") pod \"redhat-operators-bfgsk\" (UID: \"b5fd03da-4221-435b-93b4-3030556fb357\") " pod="openshift-marketplace/redhat-operators-bfgsk" Sep 30 10:51:49 crc kubenswrapper[4730]: I0930 10:51:49.258751 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b5fd03da-4221-435b-93b4-3030556fb357-utilities\") pod \"redhat-operators-bfgsk\" (UID: \"b5fd03da-4221-435b-93b4-3030556fb357\") " pod="openshift-marketplace/redhat-operators-bfgsk" Sep 30 10:51:49 crc kubenswrapper[4730]: I0930 10:51:49.258770 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b5fd03da-4221-435b-93b4-3030556fb357-catalog-content\") pod 
\"redhat-operators-bfgsk\" (UID: \"b5fd03da-4221-435b-93b4-3030556fb357\") " pod="openshift-marketplace/redhat-operators-bfgsk" Sep 30 10:51:49 crc kubenswrapper[4730]: I0930 10:51:49.641626 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2tl7k\" (UniqueName: \"kubernetes.io/projected/b5fd03da-4221-435b-93b4-3030556fb357-kube-api-access-2tl7k\") pod \"redhat-operators-bfgsk\" (UID: \"b5fd03da-4221-435b-93b4-3030556fb357\") " pod="openshift-marketplace/redhat-operators-bfgsk" Sep 30 10:51:49 crc kubenswrapper[4730]: I0930 10:51:49.707283 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-bfgsk" Sep 30 10:51:50 crc kubenswrapper[4730]: I0930 10:51:50.183020 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-bfgsk"] Sep 30 10:51:50 crc kubenswrapper[4730]: I0930 10:51:50.252863 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bfgsk" event={"ID":"b5fd03da-4221-435b-93b4-3030556fb357","Type":"ContainerStarted","Data":"9c97f675100892d9d12cd01f3d8f9fa0eca903826b7218f99827f8e502cb984b"} Sep 30 10:51:51 crc kubenswrapper[4730]: I0930 10:51:51.265473 4730 generic.go:334] "Generic (PLEG): container finished" podID="b5fd03da-4221-435b-93b4-3030556fb357" containerID="e26502fde8dcdda43a5089b7e647d552f52ef8f5f3bed51955450e7eb637b2b9" exitCode=0 Sep 30 10:51:51 crc kubenswrapper[4730]: I0930 10:51:51.265527 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bfgsk" event={"ID":"b5fd03da-4221-435b-93b4-3030556fb357","Type":"ContainerDied","Data":"e26502fde8dcdda43a5089b7e647d552f52ef8f5f3bed51955450e7eb637b2b9"} Sep 30 10:51:51 crc kubenswrapper[4730]: I0930 10:51:51.269578 4730 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 30 10:51:52 crc kubenswrapper[4730]: I0930 10:51:52.280137 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bfgsk" event={"ID":"b5fd03da-4221-435b-93b4-3030556fb357","Type":"ContainerStarted","Data":"f2339b49094e765161c2420270dd13dd8bd6e59ea5604ece30fdd96c72df29e5"} Sep 30 10:51:56 crc kubenswrapper[4730]: I0930 10:51:56.345493 4730 generic.go:334] "Generic (PLEG): container finished" podID="b5fd03da-4221-435b-93b4-3030556fb357" containerID="f2339b49094e765161c2420270dd13dd8bd6e59ea5604ece30fdd96c72df29e5" exitCode=0 Sep 30 10:51:56 crc kubenswrapper[4730]: I0930 10:51:56.345988 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bfgsk" event={"ID":"b5fd03da-4221-435b-93b4-3030556fb357","Type":"ContainerDied","Data":"f2339b49094e765161c2420270dd13dd8bd6e59ea5604ece30fdd96c72df29e5"} Sep 30 10:51:57 crc kubenswrapper[4730]: I0930 10:51:57.358897 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bfgsk" event={"ID":"b5fd03da-4221-435b-93b4-3030556fb357","Type":"ContainerStarted","Data":"a8586d498839c52538493aafef9678ab0c1e672e91a7f703028f40c277d0d469"} Sep 30 10:51:57 crc kubenswrapper[4730]: I0930 10:51:57.381941 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-bfgsk" podStartSLOduration=2.885204939 podStartE2EDuration="8.381922306s" podCreationTimestamp="2025-09-30 10:51:49 +0000 UTC" firstStartedPulling="2025-09-30 10:51:51.269280594 +0000 UTC m=+3755.602540587" 
lastFinishedPulling="2025-09-30 10:51:56.765997961 +0000 UTC m=+3761.099257954" observedRunningTime="2025-09-30 10:51:57.376811603 +0000 UTC m=+3761.710071606" watchObservedRunningTime="2025-09-30 10:51:57.381922306 +0000 UTC m=+3761.715182299" Sep 30 10:51:58 crc kubenswrapper[4730]: I0930 10:51:58.381585 4730 scope.go:117] "RemoveContainer" containerID="cec435c0d3415ad30c7bde79883876150a78e02da8967aef01668d3b6363275b" Sep 30 10:51:58 crc kubenswrapper[4730]: E0930 10:51:58.382179 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 10:51:59 crc kubenswrapper[4730]: I0930 10:51:59.707434 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-bfgsk" Sep 30 10:51:59 crc kubenswrapper[4730]: I0930 10:51:59.707476 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-bfgsk" Sep 30 10:52:00 crc kubenswrapper[4730]: I0930 10:52:00.756344 4730 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-bfgsk" podUID="b5fd03da-4221-435b-93b4-3030556fb357" containerName="registry-server" probeResult="failure" output=< Sep 30 10:52:00 crc kubenswrapper[4730]: timeout: failed to connect service ":50051" within 1s Sep 30 10:52:00 crc kubenswrapper[4730]: > Sep 30 10:52:09 crc kubenswrapper[4730]: I0930 10:52:09.381507 4730 scope.go:117] "RemoveContainer" containerID="cec435c0d3415ad30c7bde79883876150a78e02da8967aef01668d3b6363275b" Sep 30 10:52:09 crc kubenswrapper[4730]: E0930 10:52:09.382638 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 10:52:09 crc kubenswrapper[4730]: I0930 10:52:09.778963 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-bfgsk" Sep 30 10:52:09 crc kubenswrapper[4730]: I0930 10:52:09.858732 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-bfgsk" Sep 30 10:52:10 crc kubenswrapper[4730]: I0930 10:52:10.021092 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-bfgsk"] Sep 30 10:52:11 crc kubenswrapper[4730]: I0930 10:52:11.504233 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-bfgsk" podUID="b5fd03da-4221-435b-93b4-3030556fb357" containerName="registry-server" containerID="cri-o://a8586d498839c52538493aafef9678ab0c1e672e91a7f703028f40c277d0d469" gracePeriod=2 Sep 30 10:52:12 crc kubenswrapper[4730]: I0930 10:52:12.259869 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-bfgsk" Sep 30 10:52:12 crc kubenswrapper[4730]: I0930 10:52:12.415523 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2tl7k\" (UniqueName: \"kubernetes.io/projected/b5fd03da-4221-435b-93b4-3030556fb357-kube-api-access-2tl7k\") pod \"b5fd03da-4221-435b-93b4-3030556fb357\" (UID: \"b5fd03da-4221-435b-93b4-3030556fb357\") " Sep 30 10:52:12 crc kubenswrapper[4730]: I0930 10:52:12.415754 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b5fd03da-4221-435b-93b4-3030556fb357-utilities\") pod \"b5fd03da-4221-435b-93b4-3030556fb357\" (UID: \"b5fd03da-4221-435b-93b4-3030556fb357\") " Sep 30 10:52:12 crc kubenswrapper[4730]: I0930 10:52:12.415817 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b5fd03da-4221-435b-93b4-3030556fb357-catalog-content\") pod \"b5fd03da-4221-435b-93b4-3030556fb357\" (UID: \"b5fd03da-4221-435b-93b4-3030556fb357\") " Sep 30 10:52:12 crc kubenswrapper[4730]: I0930 10:52:12.421718 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b5fd03da-4221-435b-93b4-3030556fb357-utilities" (OuterVolumeSpecName: "utilities") pod "b5fd03da-4221-435b-93b4-3030556fb357" (UID: "b5fd03da-4221-435b-93b4-3030556fb357"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 10:52:12 crc kubenswrapper[4730]: I0930 10:52:12.423004 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b5fd03da-4221-435b-93b4-3030556fb357-kube-api-access-2tl7k" (OuterVolumeSpecName: "kube-api-access-2tl7k") pod "b5fd03da-4221-435b-93b4-3030556fb357" (UID: "b5fd03da-4221-435b-93b4-3030556fb357"). InnerVolumeSpecName "kube-api-access-2tl7k". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:52:12 crc kubenswrapper[4730]: I0930 10:52:12.515428 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b5fd03da-4221-435b-93b4-3030556fb357-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b5fd03da-4221-435b-93b4-3030556fb357" (UID: "b5fd03da-4221-435b-93b4-3030556fb357"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 10:52:12 crc kubenswrapper[4730]: I0930 10:52:12.520401 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2tl7k\" (UniqueName: \"kubernetes.io/projected/b5fd03da-4221-435b-93b4-3030556fb357-kube-api-access-2tl7k\") on node \"crc\" DevicePath \"\"" Sep 30 10:52:12 crc kubenswrapper[4730]: I0930 10:52:12.520601 4730 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b5fd03da-4221-435b-93b4-3030556fb357-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 10:52:12 crc kubenswrapper[4730]: I0930 10:52:12.520701 4730 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b5fd03da-4221-435b-93b4-3030556fb357-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 10:52:12 crc kubenswrapper[4730]: I0930 10:52:12.522135 4730 generic.go:334] "Generic (PLEG): container finished" podID="b5fd03da-4221-435b-93b4-3030556fb357" containerID="a8586d498839c52538493aafef9678ab0c1e672e91a7f703028f40c277d0d469" exitCode=0 Sep 30 10:52:12 crc kubenswrapper[4730]: I0930 10:52:12.522237 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-bfgsk" Sep 30 10:52:12 crc kubenswrapper[4730]: I0930 10:52:12.522243 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bfgsk" event={"ID":"b5fd03da-4221-435b-93b4-3030556fb357","Type":"ContainerDied","Data":"a8586d498839c52538493aafef9678ab0c1e672e91a7f703028f40c277d0d469"} Sep 30 10:52:12 crc kubenswrapper[4730]: I0930 10:52:12.522880 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bfgsk" event={"ID":"b5fd03da-4221-435b-93b4-3030556fb357","Type":"ContainerDied","Data":"9c97f675100892d9d12cd01f3d8f9fa0eca903826b7218f99827f8e502cb984b"} Sep 30 10:52:12 crc kubenswrapper[4730]: I0930 10:52:12.522950 4730 scope.go:117] "RemoveContainer" containerID="a8586d498839c52538493aafef9678ab0c1e672e91a7f703028f40c277d0d469" Sep 30 10:52:12 crc kubenswrapper[4730]: I0930 10:52:12.543513 4730 scope.go:117] "RemoveContainer" containerID="f2339b49094e765161c2420270dd13dd8bd6e59ea5604ece30fdd96c72df29e5" Sep 30 10:52:12 crc kubenswrapper[4730]: I0930 10:52:12.572687 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-bfgsk"] Sep 30 10:52:12 crc kubenswrapper[4730]: I0930 10:52:12.581725 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-bfgsk"] Sep 30 10:52:12 crc kubenswrapper[4730]: I0930 10:52:12.593536 4730 scope.go:117] "RemoveContainer" containerID="e26502fde8dcdda43a5089b7e647d552f52ef8f5f3bed51955450e7eb637b2b9" Sep 30 10:52:12 crc kubenswrapper[4730]: I0930 10:52:12.615702 4730 scope.go:117] "RemoveContainer" containerID="a8586d498839c52538493aafef9678ab0c1e672e91a7f703028f40c277d0d469" Sep 30 10:52:12 crc kubenswrapper[4730]: E0930 10:52:12.616408 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a8586d498839c52538493aafef9678ab0c1e672e91a7f703028f40c277d0d469\": container with ID starting with a8586d498839c52538493aafef9678ab0c1e672e91a7f703028f40c277d0d469 not found: ID does not exist" containerID="a8586d498839c52538493aafef9678ab0c1e672e91a7f703028f40c277d0d469" Sep 30 10:52:12 crc kubenswrapper[4730]: I0930 10:52:12.616516 4730 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a8586d498839c52538493aafef9678ab0c1e672e91a7f703028f40c277d0d469"} err="failed to get container status \"a8586d498839c52538493aafef9678ab0c1e672e91a7f703028f40c277d0d469\": rpc error: code = NotFound desc = could not find container \"a8586d498839c52538493aafef9678ab0c1e672e91a7f703028f40c277d0d469\": container with ID starting with a8586d498839c52538493aafef9678ab0c1e672e91a7f703028f40c277d0d469 not found: ID does not exist" Sep 30 10:52:12 crc kubenswrapper[4730]: I0930 10:52:12.616588 4730 scope.go:117] "RemoveContainer" containerID="f2339b49094e765161c2420270dd13dd8bd6e59ea5604ece30fdd96c72df29e5" Sep 30 10:52:12 crc kubenswrapper[4730]: E0930 10:52:12.617421 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f2339b49094e765161c2420270dd13dd8bd6e59ea5604ece30fdd96c72df29e5\": container with ID starting with f2339b49094e765161c2420270dd13dd8bd6e59ea5604ece30fdd96c72df29e5 not found: ID does not exist" containerID="f2339b49094e765161c2420270dd13dd8bd6e59ea5604ece30fdd96c72df29e5" Sep 30 10:52:12 crc kubenswrapper[4730]: I0930 10:52:12.617471 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f2339b49094e765161c2420270dd13dd8bd6e59ea5604ece30fdd96c72df29e5"} err="failed to get container status \"f2339b49094e765161c2420270dd13dd8bd6e59ea5604ece30fdd96c72df29e5\": rpc error: code = NotFound desc = could not find container \"f2339b49094e765161c2420270dd13dd8bd6e59ea5604ece30fdd96c72df29e5\": container with ID starting with f2339b49094e765161c2420270dd13dd8bd6e59ea5604ece30fdd96c72df29e5 not found: ID does not exist" Sep 30 10:52:12 crc kubenswrapper[4730]: I0930 10:52:12.617498 4730 scope.go:117] "RemoveContainer" containerID="e26502fde8dcdda43a5089b7e647d552f52ef8f5f3bed51955450e7eb637b2b9" Sep 30 10:52:12 crc kubenswrapper[4730]: E0930 10:52:12.617806 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e26502fde8dcdda43a5089b7e647d552f52ef8f5f3bed51955450e7eb637b2b9\": container with ID starting with e26502fde8dcdda43a5089b7e647d552f52ef8f5f3bed51955450e7eb637b2b9 not found: ID does not exist" containerID="e26502fde8dcdda43a5089b7e647d552f52ef8f5f3bed51955450e7eb637b2b9" Sep 30 10:52:12 crc kubenswrapper[4730]: I0930 10:52:12.617854 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e26502fde8dcdda43a5089b7e647d552f52ef8f5f3bed51955450e7eb637b2b9"} err="failed to get container status \"e26502fde8dcdda43a5089b7e647d552f52ef8f5f3bed51955450e7eb637b2b9\": rpc error: code = NotFound desc = could not find container \"e26502fde8dcdda43a5089b7e647d552f52ef8f5f3bed51955450e7eb637b2b9\": container with ID starting with e26502fde8dcdda43a5089b7e647d552f52ef8f5f3bed51955450e7eb637b2b9 not found: ID does not exist" Sep 30 10:52:14 crc kubenswrapper[4730]: I0930 10:52:14.397188 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b5fd03da-4221-435b-93b4-3030556fb357" path="/var/lib/kubelet/pods/b5fd03da-4221-435b-93b4-3030556fb357/volumes" Sep 30 10:52:21 crc kubenswrapper[4730]: I0930 10:52:21.381992 4730 scope.go:117] "RemoveContainer" containerID="cec435c0d3415ad30c7bde79883876150a78e02da8967aef01668d3b6363275b" Sep 30 10:52:21 crc kubenswrapper[4730]: E0930 10:52:21.383148 4730 pod_workers.go:1301] "Error syncing pod, skipping" 
err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 10:52:35 crc kubenswrapper[4730]: I0930 10:52:35.381418 4730 scope.go:117] "RemoveContainer" containerID="cec435c0d3415ad30c7bde79883876150a78e02da8967aef01668d3b6363275b" Sep 30 10:52:35 crc kubenswrapper[4730]: E0930 10:52:35.382194 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 10:52:50 crc kubenswrapper[4730]: I0930 10:52:50.381463 4730 scope.go:117] "RemoveContainer" containerID="cec435c0d3415ad30c7bde79883876150a78e02da8967aef01668d3b6363275b" Sep 30 10:52:50 crc kubenswrapper[4730]: E0930 10:52:50.382276 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 10:53:04 crc kubenswrapper[4730]: I0930 10:53:04.381553 4730 scope.go:117] "RemoveContainer" containerID="cec435c0d3415ad30c7bde79883876150a78e02da8967aef01668d3b6363275b" Sep 30 10:53:04 crc kubenswrapper[4730]: E0930 10:53:04.382343 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 10:53:19 crc kubenswrapper[4730]: I0930 10:53:19.380587 4730 scope.go:117] "RemoveContainer" containerID="cec435c0d3415ad30c7bde79883876150a78e02da8967aef01668d3b6363275b" Sep 30 10:53:19 crc kubenswrapper[4730]: E0930 10:53:19.381254 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 10:53:32 crc kubenswrapper[4730]: I0930 10:53:32.382019 4730 scope.go:117] "RemoveContainer" containerID="cec435c0d3415ad30c7bde79883876150a78e02da8967aef01668d3b6363275b" Sep 30 10:53:33 crc kubenswrapper[4730]: I0930 10:53:33.386604 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" 
event={"ID":"95bd4436-8399-478d-9552-c9ba5ae8f327","Type":"ContainerStarted","Data":"47b7b782f1e001d15732ffb76128adb79231ada7ca853fc60fcf67f1fffb507e"} Sep 30 10:54:59 crc kubenswrapper[4730]: E0930 10:54:59.379397 4730 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.104:56056->38.102.83.104:41011: write tcp 38.102.83.104:56056->38.102.83.104:41011: write: broken pipe Sep 30 10:55:55 crc kubenswrapper[4730]: I0930 10:55:55.521395 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-49kzs"] Sep 30 10:55:55 crc kubenswrapper[4730]: E0930 10:55:55.522178 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b5fd03da-4221-435b-93b4-3030556fb357" containerName="extract-utilities" Sep 30 10:55:55 crc kubenswrapper[4730]: I0930 10:55:55.522190 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="b5fd03da-4221-435b-93b4-3030556fb357" containerName="extract-utilities" Sep 30 10:55:55 crc kubenswrapper[4730]: E0930 10:55:55.522211 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b5fd03da-4221-435b-93b4-3030556fb357" containerName="extract-content" Sep 30 10:55:55 crc kubenswrapper[4730]: I0930 10:55:55.522217 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="b5fd03da-4221-435b-93b4-3030556fb357" containerName="extract-content" Sep 30 10:55:55 crc kubenswrapper[4730]: E0930 10:55:55.522240 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b5fd03da-4221-435b-93b4-3030556fb357" containerName="registry-server" Sep 30 10:55:55 crc kubenswrapper[4730]: I0930 10:55:55.522248 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="b5fd03da-4221-435b-93b4-3030556fb357" containerName="registry-server" Sep 30 10:55:55 crc kubenswrapper[4730]: I0930 10:55:55.522434 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="b5fd03da-4221-435b-93b4-3030556fb357" containerName="registry-server" Sep 30 10:55:55 crc kubenswrapper[4730]: I0930 10:55:55.523740 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-49kzs" Sep 30 10:55:55 crc kubenswrapper[4730]: I0930 10:55:55.555230 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-49kzs"] Sep 30 10:55:55 crc kubenswrapper[4730]: I0930 10:55:55.602507 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m42pf\" (UniqueName: \"kubernetes.io/projected/706997b1-7293-4578-8f00-acc1a642679d-kube-api-access-m42pf\") pod \"community-operators-49kzs\" (UID: \"706997b1-7293-4578-8f00-acc1a642679d\") " pod="openshift-marketplace/community-operators-49kzs" Sep 30 10:55:55 crc kubenswrapper[4730]: I0930 10:55:55.603003 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/706997b1-7293-4578-8f00-acc1a642679d-utilities\") pod \"community-operators-49kzs\" (UID: \"706997b1-7293-4578-8f00-acc1a642679d\") " pod="openshift-marketplace/community-operators-49kzs" Sep 30 10:55:55 crc kubenswrapper[4730]: I0930 10:55:55.603103 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/706997b1-7293-4578-8f00-acc1a642679d-catalog-content\") pod \"community-operators-49kzs\" (UID: \"706997b1-7293-4578-8f00-acc1a642679d\") " pod="openshift-marketplace/community-operators-49kzs" Sep 30 10:55:55 crc kubenswrapper[4730]: I0930 10:55:55.704428 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m42pf\" (UniqueName: \"kubernetes.io/projected/706997b1-7293-4578-8f00-acc1a642679d-kube-api-access-m42pf\") pod \"community-operators-49kzs\" (UID: \"706997b1-7293-4578-8f00-acc1a642679d\") " pod="openshift-marketplace/community-operators-49kzs" Sep 30 10:55:55 crc kubenswrapper[4730]: I0930 10:55:55.704490 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/706997b1-7293-4578-8f00-acc1a642679d-utilities\") pod \"community-operators-49kzs\" (UID: \"706997b1-7293-4578-8f00-acc1a642679d\") " pod="openshift-marketplace/community-operators-49kzs" Sep 30 10:55:55 crc kubenswrapper[4730]: I0930 10:55:55.704532 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/706997b1-7293-4578-8f00-acc1a642679d-catalog-content\") pod \"community-operators-49kzs\" (UID: \"706997b1-7293-4578-8f00-acc1a642679d\") " pod="openshift-marketplace/community-operators-49kzs" Sep 30 10:55:55 crc kubenswrapper[4730]: I0930 10:55:55.705049 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/706997b1-7293-4578-8f00-acc1a642679d-catalog-content\") pod \"community-operators-49kzs\" (UID: \"706997b1-7293-4578-8f00-acc1a642679d\") " pod="openshift-marketplace/community-operators-49kzs" Sep 30 10:55:55 crc kubenswrapper[4730]: I0930 10:55:55.705317 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/706997b1-7293-4578-8f00-acc1a642679d-utilities\") pod \"community-operators-49kzs\" (UID: \"706997b1-7293-4578-8f00-acc1a642679d\") " pod="openshift-marketplace/community-operators-49kzs" Sep 30 10:55:55 crc kubenswrapper[4730]: I0930 10:55:55.731703 4730 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-m42pf\" (UniqueName: \"kubernetes.io/projected/706997b1-7293-4578-8f00-acc1a642679d-kube-api-access-m42pf\") pod \"community-operators-49kzs\" (UID: \"706997b1-7293-4578-8f00-acc1a642679d\") " pod="openshift-marketplace/community-operators-49kzs" Sep 30 10:55:55 crc kubenswrapper[4730]: I0930 10:55:55.849269 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-49kzs" Sep 30 10:55:56 crc kubenswrapper[4730]: I0930 10:55:56.335126 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-49kzs"] Sep 30 10:55:56 crc kubenswrapper[4730]: I0930 10:55:56.890658 4730 generic.go:334] "Generic (PLEG): container finished" podID="706997b1-7293-4578-8f00-acc1a642679d" containerID="1a29d99b2067931c97ffb52cf54b8912c04b2d6d0412c99029d0f2b9538dea53" exitCode=0 Sep 30 10:55:56 crc kubenswrapper[4730]: I0930 10:55:56.890803 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-49kzs" event={"ID":"706997b1-7293-4578-8f00-acc1a642679d","Type":"ContainerDied","Data":"1a29d99b2067931c97ffb52cf54b8912c04b2d6d0412c99029d0f2b9538dea53"} Sep 30 10:55:56 crc kubenswrapper[4730]: I0930 10:55:56.891022 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-49kzs" event={"ID":"706997b1-7293-4578-8f00-acc1a642679d","Type":"ContainerStarted","Data":"7dec50d8cfd7084bc5c0a7e818745982ca70dd394e73cee9aa5f054580baa7c2"} Sep 30 10:55:58 crc kubenswrapper[4730]: I0930 10:55:58.946562 4730 generic.go:334] "Generic (PLEG): container finished" podID="706997b1-7293-4578-8f00-acc1a642679d" containerID="24d387ee4f6c7b95ef13e8fbee079420c366513253b916ff2b0b00442042b396" exitCode=0 Sep 30 10:55:58 crc kubenswrapper[4730]: I0930 10:55:58.946607 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-49kzs" event={"ID":"706997b1-7293-4578-8f00-acc1a642679d","Type":"ContainerDied","Data":"24d387ee4f6c7b95ef13e8fbee079420c366513253b916ff2b0b00442042b396"} Sep 30 10:55:59 crc kubenswrapper[4730]: I0930 10:55:59.960045 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-49kzs" event={"ID":"706997b1-7293-4578-8f00-acc1a642679d","Type":"ContainerStarted","Data":"e1e43f4f4a44fd91db2f37ee725ba3e8dced86ddf6a3840bce3eec4b0e5ff120"} Sep 30 10:55:59 crc kubenswrapper[4730]: I0930 10:55:59.990997 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-49kzs" podStartSLOduration=2.323766474 podStartE2EDuration="4.990968101s" podCreationTimestamp="2025-09-30 10:55:55 +0000 UTC" firstStartedPulling="2025-09-30 10:55:56.89345572 +0000 UTC m=+4001.226715713" lastFinishedPulling="2025-09-30 10:55:59.560657347 +0000 UTC m=+4003.893917340" observedRunningTime="2025-09-30 10:55:59.983297711 +0000 UTC m=+4004.316557714" watchObservedRunningTime="2025-09-30 10:55:59.990968101 +0000 UTC m=+4004.324228094" Sep 30 10:56:02 crc kubenswrapper[4730]: I0930 10:56:02.336957 4730 patch_prober.go:28] interesting pod/machine-config-daemon-d4zf9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 10:56:02 crc kubenswrapper[4730]: I0930 10:56:02.337273 4730 prober.go:107] "Probe 
failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 10:56:05 crc kubenswrapper[4730]: I0930 10:56:05.851640 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-49kzs" Sep 30 10:56:05 crc kubenswrapper[4730]: I0930 10:56:05.852375 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-49kzs" Sep 30 10:56:05 crc kubenswrapper[4730]: I0930 10:56:05.943386 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-49kzs" Sep 30 10:56:06 crc kubenswrapper[4730]: I0930 10:56:06.104501 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-49kzs" Sep 30 10:56:07 crc kubenswrapper[4730]: I0930 10:56:07.705773 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-49kzs"] Sep 30 10:56:08 crc kubenswrapper[4730]: I0930 10:56:08.044177 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-49kzs" podUID="706997b1-7293-4578-8f00-acc1a642679d" containerName="registry-server" containerID="cri-o://e1e43f4f4a44fd91db2f37ee725ba3e8dced86ddf6a3840bce3eec4b0e5ff120" gracePeriod=2 Sep 30 10:56:08 crc kubenswrapper[4730]: I0930 10:56:08.581528 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-49kzs" Sep 30 10:56:08 crc kubenswrapper[4730]: I0930 10:56:08.698427 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/706997b1-7293-4578-8f00-acc1a642679d-catalog-content\") pod \"706997b1-7293-4578-8f00-acc1a642679d\" (UID: \"706997b1-7293-4578-8f00-acc1a642679d\") " Sep 30 10:56:08 crc kubenswrapper[4730]: I0930 10:56:08.698479 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/706997b1-7293-4578-8f00-acc1a642679d-utilities\") pod \"706997b1-7293-4578-8f00-acc1a642679d\" (UID: \"706997b1-7293-4578-8f00-acc1a642679d\") " Sep 30 10:56:08 crc kubenswrapper[4730]: I0930 10:56:08.698967 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m42pf\" (UniqueName: \"kubernetes.io/projected/706997b1-7293-4578-8f00-acc1a642679d-kube-api-access-m42pf\") pod \"706997b1-7293-4578-8f00-acc1a642679d\" (UID: \"706997b1-7293-4578-8f00-acc1a642679d\") " Sep 30 10:56:08 crc kubenswrapper[4730]: I0930 10:56:08.699686 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/706997b1-7293-4578-8f00-acc1a642679d-utilities" (OuterVolumeSpecName: "utilities") pod "706997b1-7293-4578-8f00-acc1a642679d" (UID: "706997b1-7293-4578-8f00-acc1a642679d"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 10:56:08 crc kubenswrapper[4730]: I0930 10:56:08.705759 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/706997b1-7293-4578-8f00-acc1a642679d-kube-api-access-m42pf" (OuterVolumeSpecName: "kube-api-access-m42pf") pod "706997b1-7293-4578-8f00-acc1a642679d" (UID: "706997b1-7293-4578-8f00-acc1a642679d"). InnerVolumeSpecName "kube-api-access-m42pf". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:56:08 crc kubenswrapper[4730]: I0930 10:56:08.800901 4730 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/706997b1-7293-4578-8f00-acc1a642679d-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 10:56:08 crc kubenswrapper[4730]: I0930 10:56:08.800933 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m42pf\" (UniqueName: \"kubernetes.io/projected/706997b1-7293-4578-8f00-acc1a642679d-kube-api-access-m42pf\") on node \"crc\" DevicePath \"\"" Sep 30 10:56:09 crc kubenswrapper[4730]: I0930 10:56:09.059913 4730 generic.go:334] "Generic (PLEG): container finished" podID="706997b1-7293-4578-8f00-acc1a642679d" containerID="e1e43f4f4a44fd91db2f37ee725ba3e8dced86ddf6a3840bce3eec4b0e5ff120" exitCode=0 Sep 30 10:56:09 crc kubenswrapper[4730]: I0930 10:56:09.059945 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-49kzs" event={"ID":"706997b1-7293-4578-8f00-acc1a642679d","Type":"ContainerDied","Data":"e1e43f4f4a44fd91db2f37ee725ba3e8dced86ddf6a3840bce3eec4b0e5ff120"} Sep 30 10:56:09 crc kubenswrapper[4730]: I0930 10:56:09.059997 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-49kzs" event={"ID":"706997b1-7293-4578-8f00-acc1a642679d","Type":"ContainerDied","Data":"7dec50d8cfd7084bc5c0a7e818745982ca70dd394e73cee9aa5f054580baa7c2"} Sep 30 10:56:09 crc kubenswrapper[4730]: I0930 10:56:09.060019 4730 scope.go:117] "RemoveContainer" containerID="e1e43f4f4a44fd91db2f37ee725ba3e8dced86ddf6a3840bce3eec4b0e5ff120" Sep 30 10:56:09 crc kubenswrapper[4730]: I0930 10:56:09.060801 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-49kzs" Sep 30 10:56:09 crc kubenswrapper[4730]: I0930 10:56:09.099651 4730 scope.go:117] "RemoveContainer" containerID="24d387ee4f6c7b95ef13e8fbee079420c366513253b916ff2b0b00442042b396" Sep 30 10:56:09 crc kubenswrapper[4730]: I0930 10:56:09.131199 4730 scope.go:117] "RemoveContainer" containerID="1a29d99b2067931c97ffb52cf54b8912c04b2d6d0412c99029d0f2b9538dea53" Sep 30 10:56:09 crc kubenswrapper[4730]: I0930 10:56:09.166851 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/706997b1-7293-4578-8f00-acc1a642679d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "706997b1-7293-4578-8f00-acc1a642679d" (UID: "706997b1-7293-4578-8f00-acc1a642679d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 10:56:09 crc kubenswrapper[4730]: I0930 10:56:09.194177 4730 scope.go:117] "RemoveContainer" containerID="e1e43f4f4a44fd91db2f37ee725ba3e8dced86ddf6a3840bce3eec4b0e5ff120" Sep 30 10:56:09 crc kubenswrapper[4730]: E0930 10:56:09.195131 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e1e43f4f4a44fd91db2f37ee725ba3e8dced86ddf6a3840bce3eec4b0e5ff120\": container with ID starting with e1e43f4f4a44fd91db2f37ee725ba3e8dced86ddf6a3840bce3eec4b0e5ff120 not found: ID does not exist" containerID="e1e43f4f4a44fd91db2f37ee725ba3e8dced86ddf6a3840bce3eec4b0e5ff120" Sep 30 10:56:09 crc kubenswrapper[4730]: I0930 10:56:09.195162 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e1e43f4f4a44fd91db2f37ee725ba3e8dced86ddf6a3840bce3eec4b0e5ff120"} err="failed to get container status \"e1e43f4f4a44fd91db2f37ee725ba3e8dced86ddf6a3840bce3eec4b0e5ff120\": rpc error: code = NotFound desc = could not find container \"e1e43f4f4a44fd91db2f37ee725ba3e8dced86ddf6a3840bce3eec4b0e5ff120\": container with ID starting with e1e43f4f4a44fd91db2f37ee725ba3e8dced86ddf6a3840bce3eec4b0e5ff120 not found: ID does not exist" Sep 30 10:56:09 crc kubenswrapper[4730]: I0930 10:56:09.195198 4730 scope.go:117] "RemoveContainer" containerID="24d387ee4f6c7b95ef13e8fbee079420c366513253b916ff2b0b00442042b396" Sep 30 10:56:09 crc kubenswrapper[4730]: E0930 10:56:09.195576 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"24d387ee4f6c7b95ef13e8fbee079420c366513253b916ff2b0b00442042b396\": container with ID starting with 24d387ee4f6c7b95ef13e8fbee079420c366513253b916ff2b0b00442042b396 not found: ID does not exist" containerID="24d387ee4f6c7b95ef13e8fbee079420c366513253b916ff2b0b00442042b396" Sep 30 10:56:09 crc kubenswrapper[4730]: I0930 10:56:09.195676 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"24d387ee4f6c7b95ef13e8fbee079420c366513253b916ff2b0b00442042b396"} err="failed to get container status \"24d387ee4f6c7b95ef13e8fbee079420c366513253b916ff2b0b00442042b396\": rpc error: code = NotFound desc = could not find container \"24d387ee4f6c7b95ef13e8fbee079420c366513253b916ff2b0b00442042b396\": container with ID starting with 24d387ee4f6c7b95ef13e8fbee079420c366513253b916ff2b0b00442042b396 not found: ID does not exist" Sep 30 10:56:09 crc kubenswrapper[4730]: I0930 10:56:09.195709 4730 scope.go:117] "RemoveContainer" containerID="1a29d99b2067931c97ffb52cf54b8912c04b2d6d0412c99029d0f2b9538dea53" Sep 30 10:56:09 crc kubenswrapper[4730]: E0930 10:56:09.196051 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1a29d99b2067931c97ffb52cf54b8912c04b2d6d0412c99029d0f2b9538dea53\": container with ID starting with 1a29d99b2067931c97ffb52cf54b8912c04b2d6d0412c99029d0f2b9538dea53 not found: ID does not exist" containerID="1a29d99b2067931c97ffb52cf54b8912c04b2d6d0412c99029d0f2b9538dea53" Sep 30 10:56:09 crc kubenswrapper[4730]: I0930 10:56:09.196097 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1a29d99b2067931c97ffb52cf54b8912c04b2d6d0412c99029d0f2b9538dea53"} err="failed to get container status \"1a29d99b2067931c97ffb52cf54b8912c04b2d6d0412c99029d0f2b9538dea53\": rpc error: code = NotFound desc = could not 
find container \"1a29d99b2067931c97ffb52cf54b8912c04b2d6d0412c99029d0f2b9538dea53\": container with ID starting with 1a29d99b2067931c97ffb52cf54b8912c04b2d6d0412c99029d0f2b9538dea53 not found: ID does not exist" Sep 30 10:56:09 crc kubenswrapper[4730]: I0930 10:56:09.209863 4730 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/706997b1-7293-4578-8f00-acc1a642679d-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 10:56:09 crc kubenswrapper[4730]: I0930 10:56:09.433383 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-49kzs"] Sep 30 10:56:09 crc kubenswrapper[4730]: I0930 10:56:09.444045 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-49kzs"] Sep 30 10:56:10 crc kubenswrapper[4730]: I0930 10:56:10.393931 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="706997b1-7293-4578-8f00-acc1a642679d" path="/var/lib/kubelet/pods/706997b1-7293-4578-8f00-acc1a642679d/volumes" Sep 30 10:56:32 crc kubenswrapper[4730]: I0930 10:56:32.336918 4730 patch_prober.go:28] interesting pod/machine-config-daemon-d4zf9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 10:56:32 crc kubenswrapper[4730]: I0930 10:56:32.337889 4730 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 10:56:33 crc kubenswrapper[4730]: E0930 10:56:33.267663 4730 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.104:58502->38.102.83.104:41011: write tcp 38.102.83.104:58502->38.102.83.104:41011: write: connection reset by peer Sep 30 10:56:37 crc kubenswrapper[4730]: E0930 10:56:37.579181 4730 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.104:58604->38.102.83.104:41011: write tcp 38.102.83.104:58604->38.102.83.104:41011: write: broken pipe Sep 30 10:56:37 crc kubenswrapper[4730]: I0930 10:56:37.709872 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-phmcl"] Sep 30 10:56:37 crc kubenswrapper[4730]: E0930 10:56:37.710483 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="706997b1-7293-4578-8f00-acc1a642679d" containerName="registry-server" Sep 30 10:56:37 crc kubenswrapper[4730]: I0930 10:56:37.710679 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="706997b1-7293-4578-8f00-acc1a642679d" containerName="registry-server" Sep 30 10:56:37 crc kubenswrapper[4730]: E0930 10:56:37.710860 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="706997b1-7293-4578-8f00-acc1a642679d" containerName="extract-content" Sep 30 10:56:37 crc kubenswrapper[4730]: I0930 10:56:37.710881 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="706997b1-7293-4578-8f00-acc1a642679d" containerName="extract-content" Sep 30 10:56:37 crc kubenswrapper[4730]: E0930 10:56:37.710904 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="706997b1-7293-4578-8f00-acc1a642679d" containerName="extract-utilities" Sep 30 10:56:37 crc 
kubenswrapper[4730]: I0930 10:56:37.710915 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="706997b1-7293-4578-8f00-acc1a642679d" containerName="extract-utilities" Sep 30 10:56:37 crc kubenswrapper[4730]: I0930 10:56:37.711176 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="706997b1-7293-4578-8f00-acc1a642679d" containerName="registry-server" Sep 30 10:56:37 crc kubenswrapper[4730]: I0930 10:56:37.713033 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-phmcl" Sep 30 10:56:37 crc kubenswrapper[4730]: I0930 10:56:37.729988 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-phmcl"] Sep 30 10:56:37 crc kubenswrapper[4730]: I0930 10:56:37.858324 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6873c3c0-3a46-44bf-9a3a-ab96a4b688a7-utilities\") pod \"certified-operators-phmcl\" (UID: \"6873c3c0-3a46-44bf-9a3a-ab96a4b688a7\") " pod="openshift-marketplace/certified-operators-phmcl" Sep 30 10:56:37 crc kubenswrapper[4730]: I0930 10:56:37.858703 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6873c3c0-3a46-44bf-9a3a-ab96a4b688a7-catalog-content\") pod \"certified-operators-phmcl\" (UID: \"6873c3c0-3a46-44bf-9a3a-ab96a4b688a7\") " pod="openshift-marketplace/certified-operators-phmcl" Sep 30 10:56:37 crc kubenswrapper[4730]: I0930 10:56:37.858773 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kjbp2\" (UniqueName: \"kubernetes.io/projected/6873c3c0-3a46-44bf-9a3a-ab96a4b688a7-kube-api-access-kjbp2\") pod \"certified-operators-phmcl\" (UID: \"6873c3c0-3a46-44bf-9a3a-ab96a4b688a7\") " pod="openshift-marketplace/certified-operators-phmcl" Sep 30 10:56:37 crc kubenswrapper[4730]: I0930 10:56:37.961527 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6873c3c0-3a46-44bf-9a3a-ab96a4b688a7-utilities\") pod \"certified-operators-phmcl\" (UID: \"6873c3c0-3a46-44bf-9a3a-ab96a4b688a7\") " pod="openshift-marketplace/certified-operators-phmcl" Sep 30 10:56:37 crc kubenswrapper[4730]: I0930 10:56:37.961692 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6873c3c0-3a46-44bf-9a3a-ab96a4b688a7-catalog-content\") pod \"certified-operators-phmcl\" (UID: \"6873c3c0-3a46-44bf-9a3a-ab96a4b688a7\") " pod="openshift-marketplace/certified-operators-phmcl" Sep 30 10:56:37 crc kubenswrapper[4730]: I0930 10:56:37.961854 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kjbp2\" (UniqueName: \"kubernetes.io/projected/6873c3c0-3a46-44bf-9a3a-ab96a4b688a7-kube-api-access-kjbp2\") pod \"certified-operators-phmcl\" (UID: \"6873c3c0-3a46-44bf-9a3a-ab96a4b688a7\") " pod="openshift-marketplace/certified-operators-phmcl" Sep 30 10:56:37 crc kubenswrapper[4730]: I0930 10:56:37.962112 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6873c3c0-3a46-44bf-9a3a-ab96a4b688a7-utilities\") pod \"certified-operators-phmcl\" (UID: \"6873c3c0-3a46-44bf-9a3a-ab96a4b688a7\") " 
pod="openshift-marketplace/certified-operators-phmcl" Sep 30 10:56:37 crc kubenswrapper[4730]: I0930 10:56:37.962162 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6873c3c0-3a46-44bf-9a3a-ab96a4b688a7-catalog-content\") pod \"certified-operators-phmcl\" (UID: \"6873c3c0-3a46-44bf-9a3a-ab96a4b688a7\") " pod="openshift-marketplace/certified-operators-phmcl" Sep 30 10:56:38 crc kubenswrapper[4730]: I0930 10:56:38.000659 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kjbp2\" (UniqueName: \"kubernetes.io/projected/6873c3c0-3a46-44bf-9a3a-ab96a4b688a7-kube-api-access-kjbp2\") pod \"certified-operators-phmcl\" (UID: \"6873c3c0-3a46-44bf-9a3a-ab96a4b688a7\") " pod="openshift-marketplace/certified-operators-phmcl" Sep 30 10:56:38 crc kubenswrapper[4730]: I0930 10:56:38.044284 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-phmcl" Sep 30 10:56:38 crc kubenswrapper[4730]: I0930 10:56:38.619739 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-phmcl"] Sep 30 10:56:39 crc kubenswrapper[4730]: I0930 10:56:39.382382 4730 generic.go:334] "Generic (PLEG): container finished" podID="6873c3c0-3a46-44bf-9a3a-ab96a4b688a7" containerID="d0a2f87692efacf2afc460cd3fd258e12b4a9c02297fe733998dc5ffb8377c3d" exitCode=0 Sep 30 10:56:39 crc kubenswrapper[4730]: I0930 10:56:39.382496 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-phmcl" event={"ID":"6873c3c0-3a46-44bf-9a3a-ab96a4b688a7","Type":"ContainerDied","Data":"d0a2f87692efacf2afc460cd3fd258e12b4a9c02297fe733998dc5ffb8377c3d"} Sep 30 10:56:39 crc kubenswrapper[4730]: I0930 10:56:39.382907 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-phmcl" event={"ID":"6873c3c0-3a46-44bf-9a3a-ab96a4b688a7","Type":"ContainerStarted","Data":"0431c091d91a6cc706788cf374336a77bb5b1c6c08b8892ffcb54004f9389336"} Sep 30 10:56:41 crc kubenswrapper[4730]: I0930 10:56:41.401979 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-phmcl" event={"ID":"6873c3c0-3a46-44bf-9a3a-ab96a4b688a7","Type":"ContainerStarted","Data":"6223c25e9c17c0a86a0ca5ca098f4bfa79082aab1fb79e19c71fecd36d4b11a9"} Sep 30 10:56:42 crc kubenswrapper[4730]: I0930 10:56:42.432215 4730 generic.go:334] "Generic (PLEG): container finished" podID="6873c3c0-3a46-44bf-9a3a-ab96a4b688a7" containerID="6223c25e9c17c0a86a0ca5ca098f4bfa79082aab1fb79e19c71fecd36d4b11a9" exitCode=0 Sep 30 10:56:42 crc kubenswrapper[4730]: I0930 10:56:42.432277 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-phmcl" event={"ID":"6873c3c0-3a46-44bf-9a3a-ab96a4b688a7","Type":"ContainerDied","Data":"6223c25e9c17c0a86a0ca5ca098f4bfa79082aab1fb79e19c71fecd36d4b11a9"} Sep 30 10:56:44 crc kubenswrapper[4730]: I0930 10:56:44.458772 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-phmcl" event={"ID":"6873c3c0-3a46-44bf-9a3a-ab96a4b688a7","Type":"ContainerStarted","Data":"6d8a900f27dc979bd4f8acd0fd065e5c342984ea3ec20a43683518e3fc7b88aa"} Sep 30 10:56:44 crc kubenswrapper[4730]: I0930 10:56:44.476808 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-phmcl" 
podStartSLOduration=3.725731469 podStartE2EDuration="7.476791716s" podCreationTimestamp="2025-09-30 10:56:37 +0000 UTC" firstStartedPulling="2025-09-30 10:56:39.385405761 +0000 UTC m=+4043.718665754" lastFinishedPulling="2025-09-30 10:56:43.136465978 +0000 UTC m=+4047.469726001" observedRunningTime="2025-09-30 10:56:44.472823182 +0000 UTC m=+4048.806083175" watchObservedRunningTime="2025-09-30 10:56:44.476791716 +0000 UTC m=+4048.810051699" Sep 30 10:56:48 crc kubenswrapper[4730]: I0930 10:56:48.044748 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-phmcl" Sep 30 10:56:48 crc kubenswrapper[4730]: I0930 10:56:48.045055 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-phmcl" Sep 30 10:56:48 crc kubenswrapper[4730]: I0930 10:56:48.099856 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-phmcl" Sep 30 10:56:48 crc kubenswrapper[4730]: I0930 10:56:48.571154 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-phmcl" Sep 30 10:56:48 crc kubenswrapper[4730]: I0930 10:56:48.625976 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-phmcl"] Sep 30 10:56:50 crc kubenswrapper[4730]: I0930 10:56:50.523909 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-phmcl" podUID="6873c3c0-3a46-44bf-9a3a-ab96a4b688a7" containerName="registry-server" containerID="cri-o://6d8a900f27dc979bd4f8acd0fd065e5c342984ea3ec20a43683518e3fc7b88aa" gracePeriod=2 Sep 30 10:56:51 crc kubenswrapper[4730]: I0930 10:56:51.074354 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-phmcl" Sep 30 10:56:51 crc kubenswrapper[4730]: I0930 10:56:51.193717 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6873c3c0-3a46-44bf-9a3a-ab96a4b688a7-catalog-content\") pod \"6873c3c0-3a46-44bf-9a3a-ab96a4b688a7\" (UID: \"6873c3c0-3a46-44bf-9a3a-ab96a4b688a7\") " Sep 30 10:56:51 crc kubenswrapper[4730]: I0930 10:56:51.194269 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6873c3c0-3a46-44bf-9a3a-ab96a4b688a7-utilities\") pod \"6873c3c0-3a46-44bf-9a3a-ab96a4b688a7\" (UID: \"6873c3c0-3a46-44bf-9a3a-ab96a4b688a7\") " Sep 30 10:56:51 crc kubenswrapper[4730]: I0930 10:56:51.194469 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kjbp2\" (UniqueName: \"kubernetes.io/projected/6873c3c0-3a46-44bf-9a3a-ab96a4b688a7-kube-api-access-kjbp2\") pod \"6873c3c0-3a46-44bf-9a3a-ab96a4b688a7\" (UID: \"6873c3c0-3a46-44bf-9a3a-ab96a4b688a7\") " Sep 30 10:56:51 crc kubenswrapper[4730]: I0930 10:56:51.195360 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6873c3c0-3a46-44bf-9a3a-ab96a4b688a7-utilities" (OuterVolumeSpecName: "utilities") pod "6873c3c0-3a46-44bf-9a3a-ab96a4b688a7" (UID: "6873c3c0-3a46-44bf-9a3a-ab96a4b688a7"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 10:56:51 crc kubenswrapper[4730]: I0930 10:56:51.196064 4730 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6873c3c0-3a46-44bf-9a3a-ab96a4b688a7-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 10:56:51 crc kubenswrapper[4730]: I0930 10:56:51.204861 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6873c3c0-3a46-44bf-9a3a-ab96a4b688a7-kube-api-access-kjbp2" (OuterVolumeSpecName: "kube-api-access-kjbp2") pod "6873c3c0-3a46-44bf-9a3a-ab96a4b688a7" (UID: "6873c3c0-3a46-44bf-9a3a-ab96a4b688a7"). InnerVolumeSpecName "kube-api-access-kjbp2". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:56:51 crc kubenswrapper[4730]: I0930 10:56:51.294990 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6873c3c0-3a46-44bf-9a3a-ab96a4b688a7-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6873c3c0-3a46-44bf-9a3a-ab96a4b688a7" (UID: "6873c3c0-3a46-44bf-9a3a-ab96a4b688a7"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 10:56:51 crc kubenswrapper[4730]: I0930 10:56:51.298490 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kjbp2\" (UniqueName: \"kubernetes.io/projected/6873c3c0-3a46-44bf-9a3a-ab96a4b688a7-kube-api-access-kjbp2\") on node \"crc\" DevicePath \"\"" Sep 30 10:56:51 crc kubenswrapper[4730]: I0930 10:56:51.298533 4730 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6873c3c0-3a46-44bf-9a3a-ab96a4b688a7-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 10:56:51 crc kubenswrapper[4730]: I0930 10:56:51.548264 4730 generic.go:334] "Generic (PLEG): container finished" podID="6873c3c0-3a46-44bf-9a3a-ab96a4b688a7" containerID="6d8a900f27dc979bd4f8acd0fd065e5c342984ea3ec20a43683518e3fc7b88aa" exitCode=0 Sep 30 10:56:51 crc kubenswrapper[4730]: I0930 10:56:51.548345 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-phmcl" event={"ID":"6873c3c0-3a46-44bf-9a3a-ab96a4b688a7","Type":"ContainerDied","Data":"6d8a900f27dc979bd4f8acd0fd065e5c342984ea3ec20a43683518e3fc7b88aa"} Sep 30 10:56:51 crc kubenswrapper[4730]: I0930 10:56:51.548413 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-phmcl" event={"ID":"6873c3c0-3a46-44bf-9a3a-ab96a4b688a7","Type":"ContainerDied","Data":"0431c091d91a6cc706788cf374336a77bb5b1c6c08b8892ffcb54004f9389336"} Sep 30 10:56:51 crc kubenswrapper[4730]: I0930 10:56:51.548561 4730 scope.go:117] "RemoveContainer" containerID="6d8a900f27dc979bd4f8acd0fd065e5c342984ea3ec20a43683518e3fc7b88aa" Sep 30 10:56:51 crc kubenswrapper[4730]: I0930 10:56:51.548921 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-phmcl" Sep 30 10:56:51 crc kubenswrapper[4730]: I0930 10:56:51.608850 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-phmcl"] Sep 30 10:56:51 crc kubenswrapper[4730]: I0930 10:56:51.619844 4730 scope.go:117] "RemoveContainer" containerID="6223c25e9c17c0a86a0ca5ca098f4bfa79082aab1fb79e19c71fecd36d4b11a9" Sep 30 10:56:51 crc kubenswrapper[4730]: I0930 10:56:51.621269 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-phmcl"] Sep 30 10:56:51 crc kubenswrapper[4730]: I0930 10:56:51.655553 4730 scope.go:117] "RemoveContainer" containerID="d0a2f87692efacf2afc460cd3fd258e12b4a9c02297fe733998dc5ffb8377c3d" Sep 30 10:56:51 crc kubenswrapper[4730]: I0930 10:56:51.703813 4730 scope.go:117] "RemoveContainer" containerID="6d8a900f27dc979bd4f8acd0fd065e5c342984ea3ec20a43683518e3fc7b88aa" Sep 30 10:56:51 crc kubenswrapper[4730]: E0930 10:56:51.704415 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6d8a900f27dc979bd4f8acd0fd065e5c342984ea3ec20a43683518e3fc7b88aa\": container with ID starting with 6d8a900f27dc979bd4f8acd0fd065e5c342984ea3ec20a43683518e3fc7b88aa not found: ID does not exist" containerID="6d8a900f27dc979bd4f8acd0fd065e5c342984ea3ec20a43683518e3fc7b88aa" Sep 30 10:56:51 crc kubenswrapper[4730]: I0930 10:56:51.704576 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6d8a900f27dc979bd4f8acd0fd065e5c342984ea3ec20a43683518e3fc7b88aa"} err="failed to get container status \"6d8a900f27dc979bd4f8acd0fd065e5c342984ea3ec20a43683518e3fc7b88aa\": rpc error: code = NotFound desc = could not find container \"6d8a900f27dc979bd4f8acd0fd065e5c342984ea3ec20a43683518e3fc7b88aa\": container with ID starting with 6d8a900f27dc979bd4f8acd0fd065e5c342984ea3ec20a43683518e3fc7b88aa not found: ID does not exist" Sep 30 10:56:51 crc kubenswrapper[4730]: I0930 10:56:51.704761 4730 scope.go:117] "RemoveContainer" containerID="6223c25e9c17c0a86a0ca5ca098f4bfa79082aab1fb79e19c71fecd36d4b11a9" Sep 30 10:56:51 crc kubenswrapper[4730]: E0930 10:56:51.705321 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6223c25e9c17c0a86a0ca5ca098f4bfa79082aab1fb79e19c71fecd36d4b11a9\": container with ID starting with 6223c25e9c17c0a86a0ca5ca098f4bfa79082aab1fb79e19c71fecd36d4b11a9 not found: ID does not exist" containerID="6223c25e9c17c0a86a0ca5ca098f4bfa79082aab1fb79e19c71fecd36d4b11a9" Sep 30 10:56:51 crc kubenswrapper[4730]: I0930 10:56:51.705375 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6223c25e9c17c0a86a0ca5ca098f4bfa79082aab1fb79e19c71fecd36d4b11a9"} err="failed to get container status \"6223c25e9c17c0a86a0ca5ca098f4bfa79082aab1fb79e19c71fecd36d4b11a9\": rpc error: code = NotFound desc = could not find container \"6223c25e9c17c0a86a0ca5ca098f4bfa79082aab1fb79e19c71fecd36d4b11a9\": container with ID starting with 6223c25e9c17c0a86a0ca5ca098f4bfa79082aab1fb79e19c71fecd36d4b11a9 not found: ID does not exist" Sep 30 10:56:51 crc kubenswrapper[4730]: I0930 10:56:51.705410 4730 scope.go:117] "RemoveContainer" containerID="d0a2f87692efacf2afc460cd3fd258e12b4a9c02297fe733998dc5ffb8377c3d" Sep 30 10:56:51 crc kubenswrapper[4730]: E0930 10:56:51.705763 4730 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"d0a2f87692efacf2afc460cd3fd258e12b4a9c02297fe733998dc5ffb8377c3d\": container with ID starting with d0a2f87692efacf2afc460cd3fd258e12b4a9c02297fe733998dc5ffb8377c3d not found: ID does not exist" containerID="d0a2f87692efacf2afc460cd3fd258e12b4a9c02297fe733998dc5ffb8377c3d" Sep 30 10:56:51 crc kubenswrapper[4730]: I0930 10:56:51.705796 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d0a2f87692efacf2afc460cd3fd258e12b4a9c02297fe733998dc5ffb8377c3d"} err="failed to get container status \"d0a2f87692efacf2afc460cd3fd258e12b4a9c02297fe733998dc5ffb8377c3d\": rpc error: code = NotFound desc = could not find container \"d0a2f87692efacf2afc460cd3fd258e12b4a9c02297fe733998dc5ffb8377c3d\": container with ID starting with d0a2f87692efacf2afc460cd3fd258e12b4a9c02297fe733998dc5ffb8377c3d not found: ID does not exist" Sep 30 10:56:52 crc kubenswrapper[4730]: I0930 10:56:52.394607 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6873c3c0-3a46-44bf-9a3a-ab96a4b688a7" path="/var/lib/kubelet/pods/6873c3c0-3a46-44bf-9a3a-ab96a4b688a7/volumes" Sep 30 10:57:02 crc kubenswrapper[4730]: I0930 10:57:02.336764 4730 patch_prober.go:28] interesting pod/machine-config-daemon-d4zf9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 10:57:02 crc kubenswrapper[4730]: I0930 10:57:02.337287 4730 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 10:57:02 crc kubenswrapper[4730]: I0930 10:57:02.337345 4730 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" Sep 30 10:57:02 crc kubenswrapper[4730]: I0930 10:57:02.338126 4730 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"47b7b782f1e001d15732ffb76128adb79231ada7ca853fc60fcf67f1fffb507e"} pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 10:57:02 crc kubenswrapper[4730]: I0930 10:57:02.338178 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerName="machine-config-daemon" containerID="cri-o://47b7b782f1e001d15732ffb76128adb79231ada7ca853fc60fcf67f1fffb507e" gracePeriod=600 Sep 30 10:57:02 crc kubenswrapper[4730]: I0930 10:57:02.692840 4730 generic.go:334] "Generic (PLEG): container finished" podID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerID="47b7b782f1e001d15732ffb76128adb79231ada7ca853fc60fcf67f1fffb507e" exitCode=0 Sep 30 10:57:02 crc kubenswrapper[4730]: I0930 10:57:02.692895 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" 
event={"ID":"95bd4436-8399-478d-9552-c9ba5ae8f327","Type":"ContainerDied","Data":"47b7b782f1e001d15732ffb76128adb79231ada7ca853fc60fcf67f1fffb507e"} Sep 30 10:57:02 crc kubenswrapper[4730]: I0930 10:57:02.693232 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" event={"ID":"95bd4436-8399-478d-9552-c9ba5ae8f327","Type":"ContainerStarted","Data":"beb2fbe30416770e92ddfdbfb247323ade28dbc77e27c3fc6850c673f83687b1"} Sep 30 10:57:02 crc kubenswrapper[4730]: I0930 10:57:02.693253 4730 scope.go:117] "RemoveContainer" containerID="cec435c0d3415ad30c7bde79883876150a78e02da8967aef01668d3b6363275b" Sep 30 10:58:21 crc kubenswrapper[4730]: I0930 10:58:21.839038 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-mdh55"] Sep 30 10:58:21 crc kubenswrapper[4730]: E0930 10:58:21.840300 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6873c3c0-3a46-44bf-9a3a-ab96a4b688a7" containerName="extract-utilities" Sep 30 10:58:21 crc kubenswrapper[4730]: I0930 10:58:21.840322 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="6873c3c0-3a46-44bf-9a3a-ab96a4b688a7" containerName="extract-utilities" Sep 30 10:58:21 crc kubenswrapper[4730]: E0930 10:58:21.840356 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6873c3c0-3a46-44bf-9a3a-ab96a4b688a7" containerName="extract-content" Sep 30 10:58:21 crc kubenswrapper[4730]: I0930 10:58:21.840372 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="6873c3c0-3a46-44bf-9a3a-ab96a4b688a7" containerName="extract-content" Sep 30 10:58:21 crc kubenswrapper[4730]: E0930 10:58:21.840418 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6873c3c0-3a46-44bf-9a3a-ab96a4b688a7" containerName="registry-server" Sep 30 10:58:21 crc kubenswrapper[4730]: I0930 10:58:21.840432 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="6873c3c0-3a46-44bf-9a3a-ab96a4b688a7" containerName="registry-server" Sep 30 10:58:21 crc kubenswrapper[4730]: I0930 10:58:21.840883 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="6873c3c0-3a46-44bf-9a3a-ab96a4b688a7" containerName="registry-server" Sep 30 10:58:21 crc kubenswrapper[4730]: I0930 10:58:21.844483 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mdh55" Sep 30 10:58:21 crc kubenswrapper[4730]: I0930 10:58:21.867736 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-mdh55"] Sep 30 10:58:22 crc kubenswrapper[4730]: I0930 10:58:22.015398 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c426809d-752d-4149-8ece-44dceba59124-catalog-content\") pod \"redhat-marketplace-mdh55\" (UID: \"c426809d-752d-4149-8ece-44dceba59124\") " pod="openshift-marketplace/redhat-marketplace-mdh55" Sep 30 10:58:22 crc kubenswrapper[4730]: I0930 10:58:22.015676 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c426809d-752d-4149-8ece-44dceba59124-utilities\") pod \"redhat-marketplace-mdh55\" (UID: \"c426809d-752d-4149-8ece-44dceba59124\") " pod="openshift-marketplace/redhat-marketplace-mdh55" Sep 30 10:58:22 crc kubenswrapper[4730]: I0930 10:58:22.016003 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s8dr4\" (UniqueName: \"kubernetes.io/projected/c426809d-752d-4149-8ece-44dceba59124-kube-api-access-s8dr4\") pod \"redhat-marketplace-mdh55\" (UID: \"c426809d-752d-4149-8ece-44dceba59124\") " pod="openshift-marketplace/redhat-marketplace-mdh55" Sep 30 10:58:22 crc kubenswrapper[4730]: I0930 10:58:22.118301 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s8dr4\" (UniqueName: \"kubernetes.io/projected/c426809d-752d-4149-8ece-44dceba59124-kube-api-access-s8dr4\") pod \"redhat-marketplace-mdh55\" (UID: \"c426809d-752d-4149-8ece-44dceba59124\") " pod="openshift-marketplace/redhat-marketplace-mdh55" Sep 30 10:58:22 crc kubenswrapper[4730]: I0930 10:58:22.118435 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c426809d-752d-4149-8ece-44dceba59124-catalog-content\") pod \"redhat-marketplace-mdh55\" (UID: \"c426809d-752d-4149-8ece-44dceba59124\") " pod="openshift-marketplace/redhat-marketplace-mdh55" Sep 30 10:58:22 crc kubenswrapper[4730]: I0930 10:58:22.118606 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c426809d-752d-4149-8ece-44dceba59124-utilities\") pod \"redhat-marketplace-mdh55\" (UID: \"c426809d-752d-4149-8ece-44dceba59124\") " pod="openshift-marketplace/redhat-marketplace-mdh55" Sep 30 10:58:22 crc kubenswrapper[4730]: I0930 10:58:22.119167 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c426809d-752d-4149-8ece-44dceba59124-utilities\") pod \"redhat-marketplace-mdh55\" (UID: \"c426809d-752d-4149-8ece-44dceba59124\") " pod="openshift-marketplace/redhat-marketplace-mdh55" Sep 30 10:58:22 crc kubenswrapper[4730]: I0930 10:58:22.119461 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c426809d-752d-4149-8ece-44dceba59124-catalog-content\") pod \"redhat-marketplace-mdh55\" (UID: \"c426809d-752d-4149-8ece-44dceba59124\") " pod="openshift-marketplace/redhat-marketplace-mdh55" Sep 30 10:58:22 crc kubenswrapper[4730]: I0930 10:58:22.138980 4730 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-s8dr4\" (UniqueName: \"kubernetes.io/projected/c426809d-752d-4149-8ece-44dceba59124-kube-api-access-s8dr4\") pod \"redhat-marketplace-mdh55\" (UID: \"c426809d-752d-4149-8ece-44dceba59124\") " pod="openshift-marketplace/redhat-marketplace-mdh55" Sep 30 10:58:22 crc kubenswrapper[4730]: I0930 10:58:22.209524 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mdh55" Sep 30 10:58:22 crc kubenswrapper[4730]: I0930 10:58:22.716974 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-mdh55"] Sep 30 10:58:23 crc kubenswrapper[4730]: I0930 10:58:23.555493 4730 generic.go:334] "Generic (PLEG): container finished" podID="c426809d-752d-4149-8ece-44dceba59124" containerID="f96c4d99d7ca2c2690759799b4ad1158c7c0345dcd51768d401e9ea4390f98e3" exitCode=0 Sep 30 10:58:23 crc kubenswrapper[4730]: I0930 10:58:23.555714 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mdh55" event={"ID":"c426809d-752d-4149-8ece-44dceba59124","Type":"ContainerDied","Data":"f96c4d99d7ca2c2690759799b4ad1158c7c0345dcd51768d401e9ea4390f98e3"} Sep 30 10:58:23 crc kubenswrapper[4730]: I0930 10:58:23.555898 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mdh55" event={"ID":"c426809d-752d-4149-8ece-44dceba59124","Type":"ContainerStarted","Data":"a7e7fdaf28a43c05cd06a56c940071220da407743aa7621393eea9cac86c9d7f"} Sep 30 10:58:23 crc kubenswrapper[4730]: I0930 10:58:23.558065 4730 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 30 10:58:26 crc kubenswrapper[4730]: I0930 10:58:26.584840 4730 generic.go:334] "Generic (PLEG): container finished" podID="c426809d-752d-4149-8ece-44dceba59124" containerID="47521e17553f6a315f3728afd235650a2c688dc46b092c4b0851926b97d6fa6f" exitCode=0 Sep 30 10:58:26 crc kubenswrapper[4730]: I0930 10:58:26.585321 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mdh55" event={"ID":"c426809d-752d-4149-8ece-44dceba59124","Type":"ContainerDied","Data":"47521e17553f6a315f3728afd235650a2c688dc46b092c4b0851926b97d6fa6f"} Sep 30 10:58:27 crc kubenswrapper[4730]: I0930 10:58:27.599035 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mdh55" event={"ID":"c426809d-752d-4149-8ece-44dceba59124","Type":"ContainerStarted","Data":"d487b153768f5d0af530ff6df1b5e3705a1791ff7a02853d47f043766f72de66"} Sep 30 10:58:27 crc kubenswrapper[4730]: I0930 10:58:27.630592 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-mdh55" podStartSLOduration=3.176334772 podStartE2EDuration="6.630566773s" podCreationTimestamp="2025-09-30 10:58:21 +0000 UTC" firstStartedPulling="2025-09-30 10:58:23.557808341 +0000 UTC m=+4147.891068334" lastFinishedPulling="2025-09-30 10:58:27.012040342 +0000 UTC m=+4151.345300335" observedRunningTime="2025-09-30 10:58:27.621470764 +0000 UTC m=+4151.954730767" watchObservedRunningTime="2025-09-30 10:58:27.630566773 +0000 UTC m=+4151.963826786" Sep 30 10:58:32 crc kubenswrapper[4730]: I0930 10:58:32.210972 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-mdh55" Sep 30 10:58:32 crc kubenswrapper[4730]: I0930 10:58:32.211551 4730 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-mdh55" Sep 30 10:58:32 crc kubenswrapper[4730]: I0930 10:58:32.268977 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-mdh55" Sep 30 10:58:32 crc kubenswrapper[4730]: I0930 10:58:32.720010 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-mdh55" Sep 30 10:58:32 crc kubenswrapper[4730]: I0930 10:58:32.796829 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-mdh55"] Sep 30 10:58:32 crc kubenswrapper[4730]: I0930 10:58:32.839942 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-xgf65"] Sep 30 10:58:32 crc kubenswrapper[4730]: I0930 10:58:32.840229 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-xgf65" podUID="01e08669-831d-492e-b8eb-302fac0ca54a" containerName="registry-server" containerID="cri-o://6d5e14d462d6805f187a3efee1a769baa9e7a117c4fab7d5423440714555a3b2" gracePeriod=2 Sep 30 10:58:33 crc kubenswrapper[4730]: I0930 10:58:33.669653 4730 generic.go:334] "Generic (PLEG): container finished" podID="01e08669-831d-492e-b8eb-302fac0ca54a" containerID="6d5e14d462d6805f187a3efee1a769baa9e7a117c4fab7d5423440714555a3b2" exitCode=0 Sep 30 10:58:33 crc kubenswrapper[4730]: I0930 10:58:33.670991 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xgf65" event={"ID":"01e08669-831d-492e-b8eb-302fac0ca54a","Type":"ContainerDied","Data":"6d5e14d462d6805f187a3efee1a769baa9e7a117c4fab7d5423440714555a3b2"} Sep 30 10:58:33 crc kubenswrapper[4730]: I0930 10:58:33.771835 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xgf65" Sep 30 10:58:33 crc kubenswrapper[4730]: I0930 10:58:33.886364 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c86cq\" (UniqueName: \"kubernetes.io/projected/01e08669-831d-492e-b8eb-302fac0ca54a-kube-api-access-c86cq\") pod \"01e08669-831d-492e-b8eb-302fac0ca54a\" (UID: \"01e08669-831d-492e-b8eb-302fac0ca54a\") " Sep 30 10:58:33 crc kubenswrapper[4730]: I0930 10:58:33.886440 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/01e08669-831d-492e-b8eb-302fac0ca54a-catalog-content\") pod \"01e08669-831d-492e-b8eb-302fac0ca54a\" (UID: \"01e08669-831d-492e-b8eb-302fac0ca54a\") " Sep 30 10:58:33 crc kubenswrapper[4730]: I0930 10:58:33.886541 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/01e08669-831d-492e-b8eb-302fac0ca54a-utilities\") pod \"01e08669-831d-492e-b8eb-302fac0ca54a\" (UID: \"01e08669-831d-492e-b8eb-302fac0ca54a\") " Sep 30 10:58:33 crc kubenswrapper[4730]: I0930 10:58:33.888860 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/01e08669-831d-492e-b8eb-302fac0ca54a-utilities" (OuterVolumeSpecName: "utilities") pod "01e08669-831d-492e-b8eb-302fac0ca54a" (UID: "01e08669-831d-492e-b8eb-302fac0ca54a"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 10:58:33 crc kubenswrapper[4730]: I0930 10:58:33.900098 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01e08669-831d-492e-b8eb-302fac0ca54a-kube-api-access-c86cq" (OuterVolumeSpecName: "kube-api-access-c86cq") pod "01e08669-831d-492e-b8eb-302fac0ca54a" (UID: "01e08669-831d-492e-b8eb-302fac0ca54a"). InnerVolumeSpecName "kube-api-access-c86cq". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 10:58:33 crc kubenswrapper[4730]: I0930 10:58:33.907754 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/01e08669-831d-492e-b8eb-302fac0ca54a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "01e08669-831d-492e-b8eb-302fac0ca54a" (UID: "01e08669-831d-492e-b8eb-302fac0ca54a"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 10:58:33 crc kubenswrapper[4730]: I0930 10:58:33.988836 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c86cq\" (UniqueName: \"kubernetes.io/projected/01e08669-831d-492e-b8eb-302fac0ca54a-kube-api-access-c86cq\") on node \"crc\" DevicePath \"\"" Sep 30 10:58:33 crc kubenswrapper[4730]: I0930 10:58:33.989113 4730 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/01e08669-831d-492e-b8eb-302fac0ca54a-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 10:58:33 crc kubenswrapper[4730]: I0930 10:58:33.989188 4730 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/01e08669-831d-492e-b8eb-302fac0ca54a-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 10:58:34 crc kubenswrapper[4730]: I0930 10:58:34.683959 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xgf65" event={"ID":"01e08669-831d-492e-b8eb-302fac0ca54a","Type":"ContainerDied","Data":"124575caf1976ba8b7c2efefdef671cf51d49f949e1bf173dbc4f425adbd9ac0"} Sep 30 10:58:34 crc kubenswrapper[4730]: I0930 10:58:34.684379 4730 scope.go:117] "RemoveContainer" containerID="6d5e14d462d6805f187a3efee1a769baa9e7a117c4fab7d5423440714555a3b2" Sep 30 10:58:34 crc kubenswrapper[4730]: I0930 10:58:34.683994 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xgf65" Sep 30 10:58:34 crc kubenswrapper[4730]: I0930 10:58:34.715896 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-xgf65"] Sep 30 10:58:34 crc kubenswrapper[4730]: I0930 10:58:34.716713 4730 scope.go:117] "RemoveContainer" containerID="a926aa007275aaf05777adcdaf3a55727cd79fa172fadc723f1f0c970e5fa2bd" Sep 30 10:58:34 crc kubenswrapper[4730]: I0930 10:58:34.728531 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-xgf65"] Sep 30 10:58:34 crc kubenswrapper[4730]: I0930 10:58:34.742934 4730 scope.go:117] "RemoveContainer" containerID="c4e69d6df38ca21d00d47d81fb0e053d55d3afbba8f845e9fbda2885e6ab3fbb" Sep 30 10:58:36 crc kubenswrapper[4730]: I0930 10:58:36.392467 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01e08669-831d-492e-b8eb-302fac0ca54a" path="/var/lib/kubelet/pods/01e08669-831d-492e-b8eb-302fac0ca54a/volumes" Sep 30 10:59:02 crc kubenswrapper[4730]: I0930 10:59:02.336523 4730 patch_prober.go:28] interesting pod/machine-config-daemon-d4zf9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 10:59:02 crc kubenswrapper[4730]: I0930 10:59:02.337320 4730 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 10:59:32 crc kubenswrapper[4730]: I0930 10:59:32.336701 4730 patch_prober.go:28] interesting pod/machine-config-daemon-d4zf9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 10:59:32 crc kubenswrapper[4730]: I0930 10:59:32.337307 4730 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 11:00:00 crc kubenswrapper[4730]: I0930 11:00:00.167734 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29320500-d28lx"] Sep 30 11:00:00 crc kubenswrapper[4730]: E0930 11:00:00.168846 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="01e08669-831d-492e-b8eb-302fac0ca54a" containerName="registry-server" Sep 30 11:00:00 crc kubenswrapper[4730]: I0930 11:00:00.168865 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="01e08669-831d-492e-b8eb-302fac0ca54a" containerName="registry-server" Sep 30 11:00:00 crc kubenswrapper[4730]: E0930 11:00:00.168897 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="01e08669-831d-492e-b8eb-302fac0ca54a" containerName="extract-utilities" Sep 30 11:00:00 crc kubenswrapper[4730]: I0930 11:00:00.168906 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="01e08669-831d-492e-b8eb-302fac0ca54a" containerName="extract-utilities" Sep 30 11:00:00 crc kubenswrapper[4730]: 
E0930 11:00:00.168943 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="01e08669-831d-492e-b8eb-302fac0ca54a" containerName="extract-content" Sep 30 11:00:00 crc kubenswrapper[4730]: I0930 11:00:00.168953 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="01e08669-831d-492e-b8eb-302fac0ca54a" containerName="extract-content" Sep 30 11:00:00 crc kubenswrapper[4730]: I0930 11:00:00.169198 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="01e08669-831d-492e-b8eb-302fac0ca54a" containerName="registry-server" Sep 30 11:00:00 crc kubenswrapper[4730]: I0930 11:00:00.170164 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29320500-d28lx" Sep 30 11:00:00 crc kubenswrapper[4730]: I0930 11:00:00.173757 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Sep 30 11:00:00 crc kubenswrapper[4730]: I0930 11:00:00.174079 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Sep 30 11:00:00 crc kubenswrapper[4730]: I0930 11:00:00.194840 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29320500-d28lx"] Sep 30 11:00:00 crc kubenswrapper[4730]: I0930 11:00:00.234994 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4clgl\" (UniqueName: \"kubernetes.io/projected/6868bf94-9e24-4cab-a99b-b180dcd281a5-kube-api-access-4clgl\") pod \"collect-profiles-29320500-d28lx\" (UID: \"6868bf94-9e24-4cab-a99b-b180dcd281a5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320500-d28lx" Sep 30 11:00:00 crc kubenswrapper[4730]: I0930 11:00:00.235088 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6868bf94-9e24-4cab-a99b-b180dcd281a5-secret-volume\") pod \"collect-profiles-29320500-d28lx\" (UID: \"6868bf94-9e24-4cab-a99b-b180dcd281a5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320500-d28lx" Sep 30 11:00:00 crc kubenswrapper[4730]: I0930 11:00:00.235456 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6868bf94-9e24-4cab-a99b-b180dcd281a5-config-volume\") pod \"collect-profiles-29320500-d28lx\" (UID: \"6868bf94-9e24-4cab-a99b-b180dcd281a5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320500-d28lx" Sep 30 11:00:00 crc kubenswrapper[4730]: I0930 11:00:00.338385 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4clgl\" (UniqueName: \"kubernetes.io/projected/6868bf94-9e24-4cab-a99b-b180dcd281a5-kube-api-access-4clgl\") pod \"collect-profiles-29320500-d28lx\" (UID: \"6868bf94-9e24-4cab-a99b-b180dcd281a5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320500-d28lx" Sep 30 11:00:00 crc kubenswrapper[4730]: I0930 11:00:00.338770 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6868bf94-9e24-4cab-a99b-b180dcd281a5-secret-volume\") pod \"collect-profiles-29320500-d28lx\" (UID: \"6868bf94-9e24-4cab-a99b-b180dcd281a5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320500-d28lx" 
Sep 30 11:00:00 crc kubenswrapper[4730]: I0930 11:00:00.339104 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6868bf94-9e24-4cab-a99b-b180dcd281a5-config-volume\") pod \"collect-profiles-29320500-d28lx\" (UID: \"6868bf94-9e24-4cab-a99b-b180dcd281a5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320500-d28lx" Sep 30 11:00:00 crc kubenswrapper[4730]: I0930 11:00:00.340003 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6868bf94-9e24-4cab-a99b-b180dcd281a5-config-volume\") pod \"collect-profiles-29320500-d28lx\" (UID: \"6868bf94-9e24-4cab-a99b-b180dcd281a5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320500-d28lx" Sep 30 11:00:00 crc kubenswrapper[4730]: I0930 11:00:00.346517 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6868bf94-9e24-4cab-a99b-b180dcd281a5-secret-volume\") pod \"collect-profiles-29320500-d28lx\" (UID: \"6868bf94-9e24-4cab-a99b-b180dcd281a5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320500-d28lx" Sep 30 11:00:00 crc kubenswrapper[4730]: I0930 11:00:00.359858 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4clgl\" (UniqueName: \"kubernetes.io/projected/6868bf94-9e24-4cab-a99b-b180dcd281a5-kube-api-access-4clgl\") pod \"collect-profiles-29320500-d28lx\" (UID: \"6868bf94-9e24-4cab-a99b-b180dcd281a5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320500-d28lx" Sep 30 11:00:00 crc kubenswrapper[4730]: I0930 11:00:00.491414 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29320500-d28lx"
Sep 30 11:00:00 crc kubenswrapper[4730]: I0930 11:00:00.953535 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29320500-d28lx"]
Sep 30 11:00:01 crc kubenswrapper[4730]: I0930 11:00:01.679472 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29320500-d28lx" event={"ID":"6868bf94-9e24-4cab-a99b-b180dcd281a5","Type":"ContainerStarted","Data":"880a617c794f5584919e1df06085d189a09a6bef771d194957c79de0c6e22ca7"}
Sep 30 11:00:01 crc kubenswrapper[4730]: I0930 11:00:01.683506 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29320500-d28lx" event={"ID":"6868bf94-9e24-4cab-a99b-b180dcd281a5","Type":"ContainerStarted","Data":"b5c3337c11991d6deb2b0e83c7afa5cab0b3e86de13c7ff23797eb763f94e777"}
Sep 30 11:00:01 crc kubenswrapper[4730]: I0930 11:00:01.703041 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29320500-d28lx" podStartSLOduration=1.7030190589999998 podStartE2EDuration="1.703019059s" podCreationTimestamp="2025-09-30 11:00:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 11:00:01.696545329 +0000 UTC m=+4246.029805322" watchObservedRunningTime="2025-09-30 11:00:01.703019059 +0000 UTC m=+4246.036279052"
Sep 30 11:00:02 crc kubenswrapper[4730]: I0930 11:00:02.336482 4730 patch_prober.go:28] interesting pod/machine-config-daemon-d4zf9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 30 11:00:02 crc kubenswrapper[4730]: I0930 11:00:02.336548 4730 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 30 11:00:02 crc kubenswrapper[4730]: I0930 11:00:02.336594 4730 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9"
Sep 30 11:00:02 crc kubenswrapper[4730]: I0930 11:00:02.337239 4730 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"beb2fbe30416770e92ddfdbfb247323ade28dbc77e27c3fc6850c673f83687b1"} pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Sep 30 11:00:02 crc kubenswrapper[4730]: I0930 11:00:02.337340 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerName="machine-config-daemon" containerID="cri-o://beb2fbe30416770e92ddfdbfb247323ade28dbc77e27c3fc6850c673f83687b1" gracePeriod=600
Sep 30 11:00:02 crc kubenswrapper[4730]: E0930 11:00:02.466434 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327"
Sep 30 11:00:02 crc kubenswrapper[4730]: I0930 11:00:02.694008 4730 generic.go:334] "Generic (PLEG): container finished" podID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerID="beb2fbe30416770e92ddfdbfb247323ade28dbc77e27c3fc6850c673f83687b1" exitCode=0
Sep 30 11:00:02 crc kubenswrapper[4730]: I0930 11:00:02.694093 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" event={"ID":"95bd4436-8399-478d-9552-c9ba5ae8f327","Type":"ContainerDied","Data":"beb2fbe30416770e92ddfdbfb247323ade28dbc77e27c3fc6850c673f83687b1"}
Sep 30 11:00:02 crc kubenswrapper[4730]: I0930 11:00:02.694524 4730 scope.go:117] "RemoveContainer" containerID="47b7b782f1e001d15732ffb76128adb79231ada7ca853fc60fcf67f1fffb507e"
Sep 30 11:00:02 crc kubenswrapper[4730]: I0930 11:00:02.695492 4730 scope.go:117] "RemoveContainer" containerID="beb2fbe30416770e92ddfdbfb247323ade28dbc77e27c3fc6850c673f83687b1"
Sep 30 11:00:02 crc kubenswrapper[4730]: E0930 11:00:02.695923 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327"
Sep 30 11:00:02 crc kubenswrapper[4730]: I0930 11:00:02.697060 4730 generic.go:334] "Generic (PLEG): container finished" podID="6868bf94-9e24-4cab-a99b-b180dcd281a5" containerID="880a617c794f5584919e1df06085d189a09a6bef771d194957c79de0c6e22ca7" exitCode=0
Sep 30 11:00:02 crc kubenswrapper[4730]: I0930 11:00:02.697101 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29320500-d28lx" event={"ID":"6868bf94-9e24-4cab-a99b-b180dcd281a5","Type":"ContainerDied","Data":"880a617c794f5584919e1df06085d189a09a6bef771d194957c79de0c6e22ca7"}
Sep 30 11:00:04 crc kubenswrapper[4730]: I0930 11:00:04.206309 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29320500-d28lx"
Sep 30 11:00:04 crc kubenswrapper[4730]: I0930 11:00:04.324510 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4clgl\" (UniqueName: \"kubernetes.io/projected/6868bf94-9e24-4cab-a99b-b180dcd281a5-kube-api-access-4clgl\") pod \"6868bf94-9e24-4cab-a99b-b180dcd281a5\" (UID: \"6868bf94-9e24-4cab-a99b-b180dcd281a5\") "
Sep 30 11:00:04 crc kubenswrapper[4730]: I0930 11:00:04.324561 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6868bf94-9e24-4cab-a99b-b180dcd281a5-config-volume\") pod \"6868bf94-9e24-4cab-a99b-b180dcd281a5\" (UID: \"6868bf94-9e24-4cab-a99b-b180dcd281a5\") "
Sep 30 11:00:04 crc kubenswrapper[4730]: I0930 11:00:04.324703 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6868bf94-9e24-4cab-a99b-b180dcd281a5-secret-volume\") pod \"6868bf94-9e24-4cab-a99b-b180dcd281a5\" (UID: \"6868bf94-9e24-4cab-a99b-b180dcd281a5\") "
Sep 30 11:00:04 crc kubenswrapper[4730]: I0930 11:00:04.325995 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6868bf94-9e24-4cab-a99b-b180dcd281a5-config-volume" (OuterVolumeSpecName: "config-volume") pod "6868bf94-9e24-4cab-a99b-b180dcd281a5" (UID: "6868bf94-9e24-4cab-a99b-b180dcd281a5"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 11:00:04 crc kubenswrapper[4730]: I0930 11:00:04.330534 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6868bf94-9e24-4cab-a99b-b180dcd281a5-kube-api-access-4clgl" (OuterVolumeSpecName: "kube-api-access-4clgl") pod "6868bf94-9e24-4cab-a99b-b180dcd281a5" (UID: "6868bf94-9e24-4cab-a99b-b180dcd281a5"). InnerVolumeSpecName "kube-api-access-4clgl". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 11:00:04 crc kubenswrapper[4730]: I0930 11:00:04.331496 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6868bf94-9e24-4cab-a99b-b180dcd281a5-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "6868bf94-9e24-4cab-a99b-b180dcd281a5" (UID: "6868bf94-9e24-4cab-a99b-b180dcd281a5"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 11:00:04 crc kubenswrapper[4730]: I0930 11:00:04.427122 4730 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6868bf94-9e24-4cab-a99b-b180dcd281a5-secret-volume\") on node \"crc\" DevicePath \"\""
Sep 30 11:00:04 crc kubenswrapper[4730]: I0930 11:00:04.427160 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4clgl\" (UniqueName: \"kubernetes.io/projected/6868bf94-9e24-4cab-a99b-b180dcd281a5-kube-api-access-4clgl\") on node \"crc\" DevicePath \"\""
Sep 30 11:00:04 crc kubenswrapper[4730]: I0930 11:00:04.427176 4730 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6868bf94-9e24-4cab-a99b-b180dcd281a5-config-volume\") on node \"crc\" DevicePath \"\""
Sep 30 11:00:04 crc kubenswrapper[4730]: I0930 11:00:04.725435 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29320500-d28lx" event={"ID":"6868bf94-9e24-4cab-a99b-b180dcd281a5","Type":"ContainerDied","Data":"b5c3337c11991d6deb2b0e83c7afa5cab0b3e86de13c7ff23797eb763f94e777"}
Sep 30 11:00:04 crc kubenswrapper[4730]: I0930 11:00:04.725477 4730 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b5c3337c11991d6deb2b0e83c7afa5cab0b3e86de13c7ff23797eb763f94e777"
Sep 30 11:00:04 crc kubenswrapper[4730]: I0930 11:00:04.725535 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29320500-d28lx"
Sep 30 11:00:04 crc kubenswrapper[4730]: I0930 11:00:04.778548 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29320455-5kztb"]
Sep 30 11:00:04 crc kubenswrapper[4730]: I0930 11:00:04.786174 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29320455-5kztb"]
Sep 30 11:00:06 crc kubenswrapper[4730]: I0930 11:00:06.399828 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fcbfe8d1-fee7-4f4a-92ab-ce604a005970" path="/var/lib/kubelet/pods/fcbfe8d1-fee7-4f4a-92ab-ce604a005970/volumes"
Sep 30 11:00:14 crc kubenswrapper[4730]: I0930 11:00:14.381273 4730 scope.go:117] "RemoveContainer" containerID="beb2fbe30416770e92ddfdbfb247323ade28dbc77e27c3fc6850c673f83687b1"
Sep 30 11:00:14 crc kubenswrapper[4730]: E0930 11:00:14.382248 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327"
Sep 30 11:00:29 crc kubenswrapper[4730]: I0930 11:00:29.380867 4730 scope.go:117] "RemoveContainer" containerID="beb2fbe30416770e92ddfdbfb247323ade28dbc77e27c3fc6850c673f83687b1"
Sep 30 11:00:29 crc kubenswrapper[4730]: E0930 11:00:29.382847 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327"
Sep 30 11:00:32 crc kubenswrapper[4730]: I0930 11:00:32.054067 4730 scope.go:117] "RemoveContainer" containerID="06593d3f54df33993be4326aa4cac6ec6ee31030083c90bae682bdaf2e492751"
Sep 30 11:00:44 crc kubenswrapper[4730]: I0930 11:00:44.381930 4730 scope.go:117] "RemoveContainer" containerID="beb2fbe30416770e92ddfdbfb247323ade28dbc77e27c3fc6850c673f83687b1"
Sep 30 11:00:44 crc kubenswrapper[4730]: E0930 11:00:44.382817 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327"
Sep 30 11:00:55 crc kubenswrapper[4730]: I0930 11:00:55.381388 4730 scope.go:117] "RemoveContainer" containerID="beb2fbe30416770e92ddfdbfb247323ade28dbc77e27c3fc6850c673f83687b1"
Sep 30 11:00:55 crc kubenswrapper[4730]: E0930 11:00:55.382531 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327"
Sep 30 11:01:00 crc kubenswrapper[4730]: I0930 11:01:00.169401 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-cron-29320501-wlbqz"]
Sep 30 11:01:00 crc kubenswrapper[4730]: E0930 11:01:00.170658 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6868bf94-9e24-4cab-a99b-b180dcd281a5" containerName="collect-profiles"
Sep 30 11:01:00 crc kubenswrapper[4730]: I0930 11:01:00.170681 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="6868bf94-9e24-4cab-a99b-b180dcd281a5" containerName="collect-profiles"
Sep 30 11:01:00 crc kubenswrapper[4730]: I0930 11:01:00.171023 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="6868bf94-9e24-4cab-a99b-b180dcd281a5" containerName="collect-profiles"
Sep 30 11:01:00 crc kubenswrapper[4730]: I0930 11:01:00.172024 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29320501-wlbqz"
Sep 30 11:01:00 crc kubenswrapper[4730]: I0930 11:01:00.185219 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29320501-wlbqz"]
Sep 30 11:01:00 crc kubenswrapper[4730]: I0930 11:01:00.320198 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8a0c7d59-5820-461e-a4ee-7ff69b8feadd-combined-ca-bundle\") pod \"keystone-cron-29320501-wlbqz\" (UID: \"8a0c7d59-5820-461e-a4ee-7ff69b8feadd\") " pod="openstack/keystone-cron-29320501-wlbqz"
Sep 30 11:01:00 crc kubenswrapper[4730]: I0930 11:01:00.320596 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jm6tc\" (UniqueName: \"kubernetes.io/projected/8a0c7d59-5820-461e-a4ee-7ff69b8feadd-kube-api-access-jm6tc\") pod \"keystone-cron-29320501-wlbqz\" (UID: \"8a0c7d59-5820-461e-a4ee-7ff69b8feadd\") " pod="openstack/keystone-cron-29320501-wlbqz"
Sep 30 11:01:00 crc kubenswrapper[4730]: I0930 11:01:00.320659 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8a0c7d59-5820-461e-a4ee-7ff69b8feadd-config-data\") pod \"keystone-cron-29320501-wlbqz\" (UID: \"8a0c7d59-5820-461e-a4ee-7ff69b8feadd\") " pod="openstack/keystone-cron-29320501-wlbqz"
Sep 30 11:01:00 crc kubenswrapper[4730]: I0930 11:01:00.320826 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/8a0c7d59-5820-461e-a4ee-7ff69b8feadd-fernet-keys\") pod \"keystone-cron-29320501-wlbqz\" (UID: \"8a0c7d59-5820-461e-a4ee-7ff69b8feadd\") " pod="openstack/keystone-cron-29320501-wlbqz"
Sep 30 11:01:00 crc kubenswrapper[4730]: I0930 11:01:00.422990 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8a0c7d59-5820-461e-a4ee-7ff69b8feadd-combined-ca-bundle\") pod \"keystone-cron-29320501-wlbqz\" (UID: \"8a0c7d59-5820-461e-a4ee-7ff69b8feadd\") " pod="openstack/keystone-cron-29320501-wlbqz"
Sep 30 11:01:00 crc kubenswrapper[4730]: I0930 11:01:00.423066 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jm6tc\" (UniqueName: \"kubernetes.io/projected/8a0c7d59-5820-461e-a4ee-7ff69b8feadd-kube-api-access-jm6tc\") pod \"keystone-cron-29320501-wlbqz\" (UID: \"8a0c7d59-5820-461e-a4ee-7ff69b8feadd\") " pod="openstack/keystone-cron-29320501-wlbqz"
Sep 30 11:01:00 crc kubenswrapper[4730]: I0930 11:01:00.423094 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8a0c7d59-5820-461e-a4ee-7ff69b8feadd-config-data\") pod \"keystone-cron-29320501-wlbqz\" (UID: \"8a0c7d59-5820-461e-a4ee-7ff69b8feadd\") " pod="openstack/keystone-cron-29320501-wlbqz"
Sep 30 11:01:00 crc kubenswrapper[4730]: I0930 11:01:00.423195 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/8a0c7d59-5820-461e-a4ee-7ff69b8feadd-fernet-keys\") pod \"keystone-cron-29320501-wlbqz\" (UID: \"8a0c7d59-5820-461e-a4ee-7ff69b8feadd\") " pod="openstack/keystone-cron-29320501-wlbqz"
Sep 30 11:01:00 crc kubenswrapper[4730]: I0930 11:01:00.429928 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/8a0c7d59-5820-461e-a4ee-7ff69b8feadd-fernet-keys\") pod \"keystone-cron-29320501-wlbqz\" (UID: \"8a0c7d59-5820-461e-a4ee-7ff69b8feadd\") " pod="openstack/keystone-cron-29320501-wlbqz"
Sep 30 11:01:00 crc kubenswrapper[4730]: I0930 11:01:00.430840 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8a0c7d59-5820-461e-a4ee-7ff69b8feadd-combined-ca-bundle\") pod \"keystone-cron-29320501-wlbqz\" (UID: \"8a0c7d59-5820-461e-a4ee-7ff69b8feadd\") " pod="openstack/keystone-cron-29320501-wlbqz"
Sep 30 11:01:00 crc kubenswrapper[4730]: I0930 11:01:00.431374 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8a0c7d59-5820-461e-a4ee-7ff69b8feadd-config-data\") pod \"keystone-cron-29320501-wlbqz\" (UID: \"8a0c7d59-5820-461e-a4ee-7ff69b8feadd\") " pod="openstack/keystone-cron-29320501-wlbqz"
Sep 30 11:01:00 crc kubenswrapper[4730]: I0930 11:01:00.442731 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jm6tc\" (UniqueName: \"kubernetes.io/projected/8a0c7d59-5820-461e-a4ee-7ff69b8feadd-kube-api-access-jm6tc\") pod \"keystone-cron-29320501-wlbqz\" (UID: \"8a0c7d59-5820-461e-a4ee-7ff69b8feadd\") " pod="openstack/keystone-cron-29320501-wlbqz"
Sep 30 11:01:00 crc kubenswrapper[4730]: I0930 11:01:00.538218 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29320501-wlbqz"
Sep 30 11:01:01 crc kubenswrapper[4730]: W0930 11:01:00.999719 4730 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8a0c7d59_5820_461e_a4ee_7ff69b8feadd.slice/crio-e71f6c983b7d01ffabb9761aed099a762f067dae676476e135fb84183f4e7d7b WatchSource:0}: Error finding container e71f6c983b7d01ffabb9761aed099a762f067dae676476e135fb84183f4e7d7b: Status 404 returned error can't find the container with id e71f6c983b7d01ffabb9761aed099a762f067dae676476e135fb84183f4e7d7b
Sep 30 11:01:01 crc kubenswrapper[4730]: I0930 11:01:01.003523 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29320501-wlbqz"]
Sep 30 11:01:01 crc kubenswrapper[4730]: I0930 11:01:01.318699 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29320501-wlbqz" event={"ID":"8a0c7d59-5820-461e-a4ee-7ff69b8feadd","Type":"ContainerStarted","Data":"28cad1f6c4d0197ec76bcde3e1115c9d57445e048ef469ee47a0885d550c3bb0"}
Sep 30 11:01:01 crc kubenswrapper[4730]: I0930 11:01:01.318983 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29320501-wlbqz" event={"ID":"8a0c7d59-5820-461e-a4ee-7ff69b8feadd","Type":"ContainerStarted","Data":"e71f6c983b7d01ffabb9761aed099a762f067dae676476e135fb84183f4e7d7b"}
Sep 30 11:01:01 crc kubenswrapper[4730]: I0930 11:01:01.333114 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-cron-29320501-wlbqz" podStartSLOduration=1.333093471 podStartE2EDuration="1.333093471s" podCreationTimestamp="2025-09-30 11:01:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 11:01:01.332840395 +0000 UTC m=+4305.666100398" watchObservedRunningTime="2025-09-30 11:01:01.333093471 +0000 UTC m=+4305.666353474"
Sep 30 11:01:07 crc kubenswrapper[4730]: I0930 11:01:07.394346 4730 generic.go:334] "Generic (PLEG): container finished" podID="8a0c7d59-5820-461e-a4ee-7ff69b8feadd" containerID="28cad1f6c4d0197ec76bcde3e1115c9d57445e048ef469ee47a0885d550c3bb0" exitCode=0
Sep 30 11:01:07 crc kubenswrapper[4730]: I0930 11:01:07.394426 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29320501-wlbqz" event={"ID":"8a0c7d59-5820-461e-a4ee-7ff69b8feadd","Type":"ContainerDied","Data":"28cad1f6c4d0197ec76bcde3e1115c9d57445e048ef469ee47a0885d550c3bb0"}
Sep 30 11:01:08 crc kubenswrapper[4730]: I0930 11:01:08.381897 4730 scope.go:117] "RemoveContainer" containerID="beb2fbe30416770e92ddfdbfb247323ade28dbc77e27c3fc6850c673f83687b1"
Sep 30 11:01:08 crc kubenswrapper[4730]: E0930 11:01:08.382673 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327"
Sep 30 11:01:08 crc kubenswrapper[4730]: I0930 11:01:08.793068 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29320501-wlbqz"
Sep 30 11:01:08 crc kubenswrapper[4730]: I0930 11:01:08.902446 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/8a0c7d59-5820-461e-a4ee-7ff69b8feadd-fernet-keys\") pod \"8a0c7d59-5820-461e-a4ee-7ff69b8feadd\" (UID: \"8a0c7d59-5820-461e-a4ee-7ff69b8feadd\") "
Sep 30 11:01:08 crc kubenswrapper[4730]: I0930 11:01:08.902594 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8a0c7d59-5820-461e-a4ee-7ff69b8feadd-combined-ca-bundle\") pod \"8a0c7d59-5820-461e-a4ee-7ff69b8feadd\" (UID: \"8a0c7d59-5820-461e-a4ee-7ff69b8feadd\") "
Sep 30 11:01:08 crc kubenswrapper[4730]: I0930 11:01:08.902760 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jm6tc\" (UniqueName: \"kubernetes.io/projected/8a0c7d59-5820-461e-a4ee-7ff69b8feadd-kube-api-access-jm6tc\") pod \"8a0c7d59-5820-461e-a4ee-7ff69b8feadd\" (UID: \"8a0c7d59-5820-461e-a4ee-7ff69b8feadd\") "
Sep 30 11:01:08 crc kubenswrapper[4730]: I0930 11:01:08.902811 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8a0c7d59-5820-461e-a4ee-7ff69b8feadd-config-data\") pod \"8a0c7d59-5820-461e-a4ee-7ff69b8feadd\" (UID: \"8a0c7d59-5820-461e-a4ee-7ff69b8feadd\") "
Sep 30 11:01:08 crc kubenswrapper[4730]: I0930 11:01:08.908208 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8a0c7d59-5820-461e-a4ee-7ff69b8feadd-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "8a0c7d59-5820-461e-a4ee-7ff69b8feadd" (UID: "8a0c7d59-5820-461e-a4ee-7ff69b8feadd"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 11:01:08 crc kubenswrapper[4730]: I0930 11:01:08.910012 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8a0c7d59-5820-461e-a4ee-7ff69b8feadd-kube-api-access-jm6tc" (OuterVolumeSpecName: "kube-api-access-jm6tc") pod "8a0c7d59-5820-461e-a4ee-7ff69b8feadd" (UID: "8a0c7d59-5820-461e-a4ee-7ff69b8feadd"). InnerVolumeSpecName "kube-api-access-jm6tc". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 11:01:08 crc kubenswrapper[4730]: I0930 11:01:08.951653 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8a0c7d59-5820-461e-a4ee-7ff69b8feadd-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8a0c7d59-5820-461e-a4ee-7ff69b8feadd" (UID: "8a0c7d59-5820-461e-a4ee-7ff69b8feadd"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 11:01:08 crc kubenswrapper[4730]: I0930 11:01:08.966766 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8a0c7d59-5820-461e-a4ee-7ff69b8feadd-config-data" (OuterVolumeSpecName: "config-data") pod "8a0c7d59-5820-461e-a4ee-7ff69b8feadd" (UID: "8a0c7d59-5820-461e-a4ee-7ff69b8feadd"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 11:01:09 crc kubenswrapper[4730]: I0930 11:01:09.004792 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jm6tc\" (UniqueName: \"kubernetes.io/projected/8a0c7d59-5820-461e-a4ee-7ff69b8feadd-kube-api-access-jm6tc\") on node \"crc\" DevicePath \"\""
Sep 30 11:01:09 crc kubenswrapper[4730]: I0930 11:01:09.004826 4730 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8a0c7d59-5820-461e-a4ee-7ff69b8feadd-config-data\") on node \"crc\" DevicePath \"\""
Sep 30 11:01:09 crc kubenswrapper[4730]: I0930 11:01:09.004837 4730 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/8a0c7d59-5820-461e-a4ee-7ff69b8feadd-fernet-keys\") on node \"crc\" DevicePath \"\""
Sep 30 11:01:09 crc kubenswrapper[4730]: I0930 11:01:09.004845 4730 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8a0c7d59-5820-461e-a4ee-7ff69b8feadd-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 30 11:01:09 crc kubenswrapper[4730]: I0930 11:01:09.420387 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29320501-wlbqz" event={"ID":"8a0c7d59-5820-461e-a4ee-7ff69b8feadd","Type":"ContainerDied","Data":"e71f6c983b7d01ffabb9761aed099a762f067dae676476e135fb84183f4e7d7b"}
Sep 30 11:01:09 crc kubenswrapper[4730]: I0930 11:01:09.420426 4730 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e71f6c983b7d01ffabb9761aed099a762f067dae676476e135fb84183f4e7d7b"
Sep 30 11:01:09 crc kubenswrapper[4730]: I0930 11:01:09.420508 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29320501-wlbqz"
Sep 30 11:01:23 crc kubenswrapper[4730]: I0930 11:01:23.380655 4730 scope.go:117] "RemoveContainer" containerID="beb2fbe30416770e92ddfdbfb247323ade28dbc77e27c3fc6850c673f83687b1"
Sep 30 11:01:23 crc kubenswrapper[4730]: E0930 11:01:23.381839 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327"
Sep 30 11:01:38 crc kubenswrapper[4730]: I0930 11:01:38.381860 4730 scope.go:117] "RemoveContainer" containerID="beb2fbe30416770e92ddfdbfb247323ade28dbc77e27c3fc6850c673f83687b1"
Sep 30 11:01:38 crc kubenswrapper[4730]: E0930 11:01:38.382811 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327"
Sep 30 11:01:52 crc kubenswrapper[4730]: I0930 11:01:52.381061 4730 scope.go:117] "RemoveContainer" containerID="beb2fbe30416770e92ddfdbfb247323ade28dbc77e27c3fc6850c673f83687b1"
Sep 30 11:01:52 crc kubenswrapper[4730]: E0930 11:01:52.381827 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327"
Sep 30 11:02:06 crc kubenswrapper[4730]: I0930 11:02:06.387370 4730 scope.go:117] "RemoveContainer" containerID="beb2fbe30416770e92ddfdbfb247323ade28dbc77e27c3fc6850c673f83687b1"
Sep 30 11:02:06 crc kubenswrapper[4730]: E0930 11:02:06.388060 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327"
Sep 30 11:02:06 crc kubenswrapper[4730]: I0930 11:02:06.667875 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-4fs9v"]
Sep 30 11:02:06 crc kubenswrapper[4730]: E0930 11:02:06.668373 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8a0c7d59-5820-461e-a4ee-7ff69b8feadd" containerName="keystone-cron"
Sep 30 11:02:06 crc kubenswrapper[4730]: I0930 11:02:06.668418 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="8a0c7d59-5820-461e-a4ee-7ff69b8feadd" containerName="keystone-cron"
Sep 30 11:02:06 crc kubenswrapper[4730]: I0930 11:02:06.668744 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="8a0c7d59-5820-461e-a4ee-7ff69b8feadd" containerName="keystone-cron"
Sep 30 11:02:06 crc kubenswrapper[4730]: I0930 11:02:06.670520 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-4fs9v"
Sep 30 11:02:06 crc kubenswrapper[4730]: I0930 11:02:06.712079 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-4fs9v"]
Sep 30 11:02:06 crc kubenswrapper[4730]: I0930 11:02:06.750349 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4fb0f32d-6f73-4586-ba3d-e37c52aa7647-utilities\") pod \"redhat-operators-4fs9v\" (UID: \"4fb0f32d-6f73-4586-ba3d-e37c52aa7647\") " pod="openshift-marketplace/redhat-operators-4fs9v"
Sep 30 11:02:06 crc kubenswrapper[4730]: I0930 11:02:06.750458 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m5l2m\" (UniqueName: \"kubernetes.io/projected/4fb0f32d-6f73-4586-ba3d-e37c52aa7647-kube-api-access-m5l2m\") pod \"redhat-operators-4fs9v\" (UID: \"4fb0f32d-6f73-4586-ba3d-e37c52aa7647\") " pod="openshift-marketplace/redhat-operators-4fs9v"
Sep 30 11:02:06 crc kubenswrapper[4730]: I0930 11:02:06.750508 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4fb0f32d-6f73-4586-ba3d-e37c52aa7647-catalog-content\") pod \"redhat-operators-4fs9v\" (UID: \"4fb0f32d-6f73-4586-ba3d-e37c52aa7647\") " pod="openshift-marketplace/redhat-operators-4fs9v"
Sep 30 11:02:06 crc kubenswrapper[4730]: I0930 11:02:06.852334 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4fb0f32d-6f73-4586-ba3d-e37c52aa7647-utilities\") pod \"redhat-operators-4fs9v\" (UID: \"4fb0f32d-6f73-4586-ba3d-e37c52aa7647\") " pod="openshift-marketplace/redhat-operators-4fs9v"
Sep 30 11:02:06 crc kubenswrapper[4730]: I0930 11:02:06.852455 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m5l2m\" (UniqueName: \"kubernetes.io/projected/4fb0f32d-6f73-4586-ba3d-e37c52aa7647-kube-api-access-m5l2m\") pod \"redhat-operators-4fs9v\" (UID: \"4fb0f32d-6f73-4586-ba3d-e37c52aa7647\") " pod="openshift-marketplace/redhat-operators-4fs9v"
Sep 30 11:02:06 crc kubenswrapper[4730]: I0930 11:02:06.852538 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4fb0f32d-6f73-4586-ba3d-e37c52aa7647-catalog-content\") pod \"redhat-operators-4fs9v\" (UID: \"4fb0f32d-6f73-4586-ba3d-e37c52aa7647\") " pod="openshift-marketplace/redhat-operators-4fs9v"
Sep 30 11:02:06 crc kubenswrapper[4730]: I0930 11:02:06.853061 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4fb0f32d-6f73-4586-ba3d-e37c52aa7647-catalog-content\") pod \"redhat-operators-4fs9v\" (UID: \"4fb0f32d-6f73-4586-ba3d-e37c52aa7647\") " pod="openshift-marketplace/redhat-operators-4fs9v"
Sep 30 11:02:06 crc kubenswrapper[4730]: I0930 11:02:06.853088 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4fb0f32d-6f73-4586-ba3d-e37c52aa7647-utilities\") pod \"redhat-operators-4fs9v\" (UID: \"4fb0f32d-6f73-4586-ba3d-e37c52aa7647\") " pod="openshift-marketplace/redhat-operators-4fs9v"
Sep 30 11:02:06 crc kubenswrapper[4730]: I0930 11:02:06.872601 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m5l2m\" (UniqueName: \"kubernetes.io/projected/4fb0f32d-6f73-4586-ba3d-e37c52aa7647-kube-api-access-m5l2m\") pod \"redhat-operators-4fs9v\" (UID: \"4fb0f32d-6f73-4586-ba3d-e37c52aa7647\") " pod="openshift-marketplace/redhat-operators-4fs9v"
Sep 30 11:02:07 crc kubenswrapper[4730]: I0930 11:02:07.001795 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-4fs9v"
Sep 30 11:02:07 crc kubenswrapper[4730]: I0930 11:02:07.535709 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-4fs9v"]
Sep 30 11:02:08 crc kubenswrapper[4730]: I0930 11:02:08.023387 4730 generic.go:334] "Generic (PLEG): container finished" podID="4fb0f32d-6f73-4586-ba3d-e37c52aa7647" containerID="7923ac85f11d97958e343edc368c8882b0b8ca98a1591ea5384383fa64e8d226" exitCode=0
Sep 30 11:02:08 crc kubenswrapper[4730]: I0930 11:02:08.023639 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4fs9v" event={"ID":"4fb0f32d-6f73-4586-ba3d-e37c52aa7647","Type":"ContainerDied","Data":"7923ac85f11d97958e343edc368c8882b0b8ca98a1591ea5384383fa64e8d226"}
Sep 30 11:02:08 crc kubenswrapper[4730]: I0930 11:02:08.023664 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4fs9v" event={"ID":"4fb0f32d-6f73-4586-ba3d-e37c52aa7647","Type":"ContainerStarted","Data":"e80512ea634a1e7cf6407c81368eac9760720ebce052a32686068d01f93bd1d4"}
Sep 30 11:02:10 crc kubenswrapper[4730]: I0930 11:02:10.042707 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4fs9v" event={"ID":"4fb0f32d-6f73-4586-ba3d-e37c52aa7647","Type":"ContainerStarted","Data":"5a1c8acfbbccd0be1eebc4c4a37ea25e9131191fa5403e89f42371fda9a6cbf3"}
Sep 30 11:02:15 crc kubenswrapper[4730]: I0930 11:02:15.107664 4730 generic.go:334] "Generic (PLEG): container finished" podID="4fb0f32d-6f73-4586-ba3d-e37c52aa7647" containerID="5a1c8acfbbccd0be1eebc4c4a37ea25e9131191fa5403e89f42371fda9a6cbf3" exitCode=0
Sep 30 11:02:15 crc kubenswrapper[4730]: I0930 11:02:15.107796 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4fs9v" event={"ID":"4fb0f32d-6f73-4586-ba3d-e37c52aa7647","Type":"ContainerDied","Data":"5a1c8acfbbccd0be1eebc4c4a37ea25e9131191fa5403e89f42371fda9a6cbf3"}
Sep 30 11:02:16 crc kubenswrapper[4730]: I0930 11:02:16.123683 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4fs9v" event={"ID":"4fb0f32d-6f73-4586-ba3d-e37c52aa7647","Type":"ContainerStarted","Data":"ba1cf5a4bb629ad92f2f0124ea1e573b4c75d329e61d17f5095291979da5f86a"}
Sep 30 11:02:16 crc kubenswrapper[4730]: I0930 11:02:16.153480 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-4fs9v" podStartSLOduration=2.573025602 podStartE2EDuration="10.153456399s" podCreationTimestamp="2025-09-30 11:02:06 +0000 UTC" firstStartedPulling="2025-09-30 11:02:08.028381611 +0000 UTC m=+4372.361641604" lastFinishedPulling="2025-09-30 11:02:15.608812398 +0000 UTC m=+4379.942072401" observedRunningTime="2025-09-30 11:02:16.142104641 +0000 UTC m=+4380.475364644" watchObservedRunningTime="2025-09-30 11:02:16.153456399 +0000 UTC m=+4380.486716412"
Sep 30 11:02:17 crc kubenswrapper[4730]: I0930 11:02:17.002249 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-4fs9v"
Sep 30 11:02:17 crc kubenswrapper[4730]: I0930 11:02:17.002298 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-4fs9v"
Sep 30 11:02:17 crc kubenswrapper[4730]: I0930 11:02:17.381134 4730 scope.go:117] "RemoveContainer" containerID="beb2fbe30416770e92ddfdbfb247323ade28dbc77e27c3fc6850c673f83687b1"
Sep 30 11:02:17 crc kubenswrapper[4730]: E0930 11:02:17.381728 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327"
Sep 30 11:02:18 crc kubenswrapper[4730]: I0930 11:02:18.051343 4730 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-4fs9v" podUID="4fb0f32d-6f73-4586-ba3d-e37c52aa7647" containerName="registry-server" probeResult="failure" output=<
Sep 30 11:02:18 crc kubenswrapper[4730]: timeout: failed to connect service ":50051" within 1s
Sep 30 11:02:18 crc kubenswrapper[4730]: >
Sep 30 11:02:27 crc kubenswrapper[4730]: I0930 11:02:27.076567 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-4fs9v"
Sep 30 11:02:27 crc kubenswrapper[4730]: I0930 11:02:27.137526 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-4fs9v"
Sep 30 11:02:27 crc kubenswrapper[4730]: I0930 11:02:27.313917 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-4fs9v"]
Sep 30 11:02:28 crc kubenswrapper[4730]: I0930 11:02:28.261140 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-4fs9v" podUID="4fb0f32d-6f73-4586-ba3d-e37c52aa7647" containerName="registry-server" containerID="cri-o://ba1cf5a4bb629ad92f2f0124ea1e573b4c75d329e61d17f5095291979da5f86a" gracePeriod=2
Sep 30 11:02:29 crc kubenswrapper[4730]: I0930 11:02:29.022781 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-4fs9v"
Sep 30 11:02:29 crc kubenswrapper[4730]: I0930 11:02:29.166932 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4fb0f32d-6f73-4586-ba3d-e37c52aa7647-catalog-content\") pod \"4fb0f32d-6f73-4586-ba3d-e37c52aa7647\" (UID: \"4fb0f32d-6f73-4586-ba3d-e37c52aa7647\") "
Sep 30 11:02:29 crc kubenswrapper[4730]: I0930 11:02:29.167006 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m5l2m\" (UniqueName: \"kubernetes.io/projected/4fb0f32d-6f73-4586-ba3d-e37c52aa7647-kube-api-access-m5l2m\") pod \"4fb0f32d-6f73-4586-ba3d-e37c52aa7647\" (UID: \"4fb0f32d-6f73-4586-ba3d-e37c52aa7647\") "
Sep 30 11:02:29 crc kubenswrapper[4730]: I0930 11:02:29.167066 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4fb0f32d-6f73-4586-ba3d-e37c52aa7647-utilities\") pod \"4fb0f32d-6f73-4586-ba3d-e37c52aa7647\" (UID: \"4fb0f32d-6f73-4586-ba3d-e37c52aa7647\") "
Sep 30 11:02:29 crc kubenswrapper[4730]: I0930 11:02:29.167901 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4fb0f32d-6f73-4586-ba3d-e37c52aa7647-utilities" (OuterVolumeSpecName: "utilities") pod "4fb0f32d-6f73-4586-ba3d-e37c52aa7647" (UID: "4fb0f32d-6f73-4586-ba3d-e37c52aa7647"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 11:02:29 crc kubenswrapper[4730]: I0930 11:02:29.168280 4730 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4fb0f32d-6f73-4586-ba3d-e37c52aa7647-utilities\") on node \"crc\" DevicePath \"\""
Sep 30 11:02:29 crc kubenswrapper[4730]: I0930 11:02:29.174715 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4fb0f32d-6f73-4586-ba3d-e37c52aa7647-kube-api-access-m5l2m" (OuterVolumeSpecName: "kube-api-access-m5l2m") pod "4fb0f32d-6f73-4586-ba3d-e37c52aa7647" (UID: "4fb0f32d-6f73-4586-ba3d-e37c52aa7647"). InnerVolumeSpecName "kube-api-access-m5l2m". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 11:02:29 crc kubenswrapper[4730]: I0930 11:02:29.247857 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4fb0f32d-6f73-4586-ba3d-e37c52aa7647-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4fb0f32d-6f73-4586-ba3d-e37c52aa7647" (UID: "4fb0f32d-6f73-4586-ba3d-e37c52aa7647"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 11:02:29 crc kubenswrapper[4730]: I0930 11:02:29.270783 4730 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4fb0f32d-6f73-4586-ba3d-e37c52aa7647-catalog-content\") on node \"crc\" DevicePath \"\""
Sep 30 11:02:29 crc kubenswrapper[4730]: I0930 11:02:29.270833 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m5l2m\" (UniqueName: \"kubernetes.io/projected/4fb0f32d-6f73-4586-ba3d-e37c52aa7647-kube-api-access-m5l2m\") on node \"crc\" DevicePath \"\""
Sep 30 11:02:29 crc kubenswrapper[4730]: I0930 11:02:29.276123 4730 generic.go:334] "Generic (PLEG): container finished" podID="4fb0f32d-6f73-4586-ba3d-e37c52aa7647" containerID="ba1cf5a4bb629ad92f2f0124ea1e573b4c75d329e61d17f5095291979da5f86a" exitCode=0
Sep 30 11:02:29 crc kubenswrapper[4730]: I0930 11:02:29.276202 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4fs9v" event={"ID":"4fb0f32d-6f73-4586-ba3d-e37c52aa7647","Type":"ContainerDied","Data":"ba1cf5a4bb629ad92f2f0124ea1e573b4c75d329e61d17f5095291979da5f86a"}
Sep 30 11:02:29 crc kubenswrapper[4730]: I0930 11:02:29.276249 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-4fs9v"
Sep 30 11:02:29 crc kubenswrapper[4730]: I0930 11:02:29.276298 4730 scope.go:117] "RemoveContainer" containerID="ba1cf5a4bb629ad92f2f0124ea1e573b4c75d329e61d17f5095291979da5f86a"
Sep 30 11:02:29 crc kubenswrapper[4730]: I0930 11:02:29.276274 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4fs9v" event={"ID":"4fb0f32d-6f73-4586-ba3d-e37c52aa7647","Type":"ContainerDied","Data":"e80512ea634a1e7cf6407c81368eac9760720ebce052a32686068d01f93bd1d4"}
Sep 30 11:02:29 crc kubenswrapper[4730]: I0930 11:02:29.299639 4730 scope.go:117] "RemoveContainer" containerID="5a1c8acfbbccd0be1eebc4c4a37ea25e9131191fa5403e89f42371fda9a6cbf3"
Sep 30 11:02:29 crc kubenswrapper[4730]: I0930 11:02:29.321778 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-4fs9v"]
Sep 30 11:02:29 crc kubenswrapper[4730]: I0930 11:02:29.332013 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-4fs9v"]
Sep 30 11:02:29 crc kubenswrapper[4730]: I0930 11:02:29.341080 4730 scope.go:117] "RemoveContainer" containerID="7923ac85f11d97958e343edc368c8882b0b8ca98a1591ea5384383fa64e8d226"
Sep 30 11:02:29 crc kubenswrapper[4730]: I0930 11:02:29.379020 4730 scope.go:117] "RemoveContainer" containerID="ba1cf5a4bb629ad92f2f0124ea1e573b4c75d329e61d17f5095291979da5f86a"
Sep 30 11:02:29 crc kubenswrapper[4730]: E0930 11:02:29.379494 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ba1cf5a4bb629ad92f2f0124ea1e573b4c75d329e61d17f5095291979da5f86a\": container with ID starting with ba1cf5a4bb629ad92f2f0124ea1e573b4c75d329e61d17f5095291979da5f86a not found: ID does not exist" containerID="ba1cf5a4bb629ad92f2f0124ea1e573b4c75d329e61d17f5095291979da5f86a"
Sep 30 11:02:29 crc kubenswrapper[4730]: I0930 11:02:29.379528 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ba1cf5a4bb629ad92f2f0124ea1e573b4c75d329e61d17f5095291979da5f86a"} err="failed to get container status \"ba1cf5a4bb629ad92f2f0124ea1e573b4c75d329e61d17f5095291979da5f86a\": rpc error: code = NotFound desc = could not find container \"ba1cf5a4bb629ad92f2f0124ea1e573b4c75d329e61d17f5095291979da5f86a\": container with ID starting with ba1cf5a4bb629ad92f2f0124ea1e573b4c75d329e61d17f5095291979da5f86a not found: ID does not exist"
Sep 30 11:02:29 crc kubenswrapper[4730]: I0930 11:02:29.379550 4730 scope.go:117] "RemoveContainer" containerID="5a1c8acfbbccd0be1eebc4c4a37ea25e9131191fa5403e89f42371fda9a6cbf3"
Sep 30 11:02:29 crc kubenswrapper[4730]: E0930 11:02:29.380031 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5a1c8acfbbccd0be1eebc4c4a37ea25e9131191fa5403e89f42371fda9a6cbf3\": container with ID starting with 5a1c8acfbbccd0be1eebc4c4a37ea25e9131191fa5403e89f42371fda9a6cbf3 not found: ID does not exist" containerID="5a1c8acfbbccd0be1eebc4c4a37ea25e9131191fa5403e89f42371fda9a6cbf3"
Sep 30 11:02:29 crc kubenswrapper[4730]: I0930 11:02:29.380093 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5a1c8acfbbccd0be1eebc4c4a37ea25e9131191fa5403e89f42371fda9a6cbf3"} err="failed to get container status \"5a1c8acfbbccd0be1eebc4c4a37ea25e9131191fa5403e89f42371fda9a6cbf3\": rpc error: code = NotFound desc = could not find container \"5a1c8acfbbccd0be1eebc4c4a37ea25e9131191fa5403e89f42371fda9a6cbf3\": container with ID starting with 5a1c8acfbbccd0be1eebc4c4a37ea25e9131191fa5403e89f42371fda9a6cbf3 not found: ID does not exist"
Sep 30 11:02:29 crc kubenswrapper[4730]: I0930 11:02:29.380135 4730 scope.go:117] "RemoveContainer" containerID="7923ac85f11d97958e343edc368c8882b0b8ca98a1591ea5384383fa64e8d226"
Sep 30 11:02:29 crc kubenswrapper[4730]: E0930 11:02:29.380445 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7923ac85f11d97958e343edc368c8882b0b8ca98a1591ea5384383fa64e8d226\": container with ID starting with 7923ac85f11d97958e343edc368c8882b0b8ca98a1591ea5384383fa64e8d226 not found: ID does not exist" containerID="7923ac85f11d97958e343edc368c8882b0b8ca98a1591ea5384383fa64e8d226"
Sep 30 11:02:29 crc kubenswrapper[4730]: I0930 11:02:29.380474 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7923ac85f11d97958e343edc368c8882b0b8ca98a1591ea5384383fa64e8d226"} err="failed to get container status \"7923ac85f11d97958e343edc368c8882b0b8ca98a1591ea5384383fa64e8d226\": rpc error: code = NotFound desc = could not find container \"7923ac85f11d97958e343edc368c8882b0b8ca98a1591ea5384383fa64e8d226\": container with ID starting with 7923ac85f11d97958e343edc368c8882b0b8ca98a1591ea5384383fa64e8d226 not found: ID does not exist"
Sep 30 11:02:29 crc kubenswrapper[4730]: I0930 11:02:29.381426 4730 scope.go:117] "RemoveContainer" containerID="beb2fbe30416770e92ddfdbfb247323ade28dbc77e27c3fc6850c673f83687b1"
Sep 30 11:02:29 crc kubenswrapper[4730]: E0930 11:02:29.381764 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327"
Sep 30 11:02:30 crc kubenswrapper[4730]: I0930 11:02:30.398450 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4fb0f32d-6f73-4586-ba3d-e37c52aa7647" path="/var/lib/kubelet/pods/4fb0f32d-6f73-4586-ba3d-e37c52aa7647/volumes"
Sep 30 11:02:42 crc kubenswrapper[4730]: I0930 11:02:42.381231 4730 scope.go:117] "RemoveContainer" containerID="beb2fbe30416770e92ddfdbfb247323ade28dbc77e27c3fc6850c673f83687b1"
Sep 30 11:02:42 crc kubenswrapper[4730]: E0930 11:02:42.382070 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327"
Sep 30 11:02:56 crc kubenswrapper[4730]: I0930 11:02:56.393476 4730 scope.go:117] "RemoveContainer" containerID="beb2fbe30416770e92ddfdbfb247323ade28dbc77e27c3fc6850c673f83687b1"
Sep 30 11:02:56 crc kubenswrapper[4730]: E0930 11:02:56.394460 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327"
Sep 30 11:03:07 crc kubenswrapper[4730]: I0930 11:03:07.382099 4730 scope.go:117] "RemoveContainer" containerID="beb2fbe30416770e92ddfdbfb247323ade28dbc77e27c3fc6850c673f83687b1"
Sep 30 11:03:07 crc kubenswrapper[4730]: E0930 11:03:07.383471 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327"
Sep 30 11:03:19 crc kubenswrapper[4730]: I0930 11:03:19.381326 4730 scope.go:117] "RemoveContainer" containerID="beb2fbe30416770e92ddfdbfb247323ade28dbc77e27c3fc6850c673f83687b1"
Sep 30 11:03:19 crc kubenswrapper[4730]: E0930 11:03:19.384930 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327"
Sep 30 11:03:32 crc kubenswrapper[4730]: I0930 11:03:32.380729 4730 scope.go:117] "RemoveContainer" containerID="beb2fbe30416770e92ddfdbfb247323ade28dbc77e27c3fc6850c673f83687b1"
Sep 30 11:03:32 crc kubenswrapper[4730]: E0930 11:03:32.381472 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327"
Sep 30 11:03:43 crc kubenswrapper[4730]: I0930 11:03:43.381494 4730 scope.go:117] "RemoveContainer" containerID="beb2fbe30416770e92ddfdbfb247323ade28dbc77e27c3fc6850c673f83687b1"
Sep 30 11:03:43 crc kubenswrapper[4730]: E0930 11:03:43.382129 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327"
Sep 30 11:03:57 crc kubenswrapper[4730]: I0930 11:03:57.381334 4730 scope.go:117] "RemoveContainer" containerID="beb2fbe30416770e92ddfdbfb247323ade28dbc77e27c3fc6850c673f83687b1"
Sep 30 11:03:57 crc kubenswrapper[4730]: E0930 11:03:57.382253 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327"
Sep 30 11:04:08 crc kubenswrapper[4730]: I0930 11:04:08.382999 4730 scope.go:117] "RemoveContainer" containerID="beb2fbe30416770e92ddfdbfb247323ade28dbc77e27c3fc6850c673f83687b1"
Sep 30 11:04:08 crc kubenswrapper[4730]: E0930 11:04:08.383912 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327"
Sep 30 11:04:23 crc kubenswrapper[4730]: I0930 11:04:23.380638 4730 scope.go:117] "RemoveContainer" containerID="beb2fbe30416770e92ddfdbfb247323ade28dbc77e27c3fc6850c673f83687b1"
Sep 30 11:04:23 crc kubenswrapper[4730]: E0930 11:04:23.381382 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327"
Sep 30 11:04:36 crc kubenswrapper[4730]: I0930 11:04:36.387212 4730 scope.go:117] "RemoveContainer" containerID="beb2fbe30416770e92ddfdbfb247323ade28dbc77e27c3fc6850c673f83687b1"
Sep 30 11:04:36 crc kubenswrapper[4730]: E0930 11:04:36.388021 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327"
Sep 30 11:04:40 crc kubenswrapper[4730]: E0930 11:04:40.854012 4730 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.104:58482->38.102.83.104:41011: write tcp 38.102.83.104:58482->38.102.83.104:41011: write: broken pipe
Sep 30 11:04:49 crc kubenswrapper[4730]: I0930 11:04:49.381125 4730 scope.go:117] "RemoveContainer" containerID="beb2fbe30416770e92ddfdbfb247323ade28dbc77e27c3fc6850c673f83687b1"
Sep 30 11:04:49 crc kubenswrapper[4730]: E0930 11:04:49.382000 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327"
Sep 30 11:05:00 crc kubenswrapper[4730]: I0930 11:05:00.380953 4730 scope.go:117] "RemoveContainer" containerID="beb2fbe30416770e92ddfdbfb247323ade28dbc77e27c3fc6850c673f83687b1"
Sep 30 11:05:00 crc kubenswrapper[4730]: E0930 11:05:00.382946 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327"
Sep 30 11:05:14 crc kubenswrapper[4730]: I0930 11:05:14.382219 4730 scope.go:117] "RemoveContainer" containerID="beb2fbe30416770e92ddfdbfb247323ade28dbc77e27c3fc6850c673f83687b1"
Sep 30 11:05:15 crc kubenswrapper[4730]: I0930 11:05:15.028820 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" event={"ID":"95bd4436-8399-478d-9552-c9ba5ae8f327","Type":"ContainerStarted","Data":"97fd2743ac185e4a3c882330a4b61abe84b019d15703849796b6cc4b51ad5cbd"}
Sep 30 11:06:50 crc kubenswrapper[4730]: I0930 11:06:50.718080 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-v6rm4"]
Sep 30 11:06:50 crc kubenswrapper[4730]: E0930 11:06:50.719151 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4fb0f32d-6f73-4586-ba3d-e37c52aa7647" containerName="extract-utilities"
Sep 30 11:06:50 crc kubenswrapper[4730]: I0930 11:06:50.719169 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="4fb0f32d-6f73-4586-ba3d-e37c52aa7647" containerName="extract-utilities"
Sep 30 11:06:50 crc kubenswrapper[4730]: E0930 11:06:50.719186 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4fb0f32d-6f73-4586-ba3d-e37c52aa7647" containerName="registry-server"
Sep 30 11:06:50 crc kubenswrapper[4730]: I0930 11:06:50.719197 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="4fb0f32d-6f73-4586-ba3d-e37c52aa7647" containerName="registry-server"
Sep 30 11:06:50 crc kubenswrapper[4730]: E0930 11:06:50.719214 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4fb0f32d-6f73-4586-ba3d-e37c52aa7647" containerName="extract-content"
Sep 30 11:06:50 crc kubenswrapper[4730]: I0930 11:06:50.719222 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="4fb0f32d-6f73-4586-ba3d-e37c52aa7647" containerName="extract-content"
Sep 30 11:06:50 crc kubenswrapper[4730]: I0930 11:06:50.719487 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="4fb0f32d-6f73-4586-ba3d-e37c52aa7647" containerName="registry-server"
Sep 30 11:06:50 crc kubenswrapper[4730]: I0930 11:06:50.721374 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-v6rm4"
Sep 30 11:06:50 crc kubenswrapper[4730]: I0930 11:06:50.743387 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-v6rm4"]
Sep 30 11:06:50 crc kubenswrapper[4730]: I0930 11:06:50.805300 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zwxhz\" (UniqueName: \"kubernetes.io/projected/a091f260-9672-424c-9924-baf3495f3ded-kube-api-access-zwxhz\") pod \"certified-operators-v6rm4\" (UID: \"a091f260-9672-424c-9924-baf3495f3ded\") " pod="openshift-marketplace/certified-operators-v6rm4"
Sep 30 11:06:50 crc kubenswrapper[4730]: I0930 11:06:50.805463 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a091f260-9672-424c-9924-baf3495f3ded-utilities\") pod \"certified-operators-v6rm4\" (UID: \"a091f260-9672-424c-9924-baf3495f3ded\") " pod="openshift-marketplace/certified-operators-v6rm4"
Sep 30 11:06:50 crc kubenswrapper[4730]: I0930 11:06:50.805562 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a091f260-9672-424c-9924-baf3495f3ded-catalog-content\") pod \"certified-operators-v6rm4\" (UID: \"a091f260-9672-424c-9924-baf3495f3ded\") " pod="openshift-marketplace/certified-operators-v6rm4"
Sep 30 11:06:50 crc kubenswrapper[4730]: I0930 11:06:50.907120 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a091f260-9672-424c-9924-baf3495f3ded-utilities\") pod \"certified-operators-v6rm4\" (UID: \"a091f260-9672-424c-9924-baf3495f3ded\") " pod="openshift-marketplace/certified-operators-v6rm4"
Sep 30 11:06:50 crc kubenswrapper[4730]: I0930 11:06:50.907272 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a091f260-9672-424c-9924-baf3495f3ded-catalog-content\") pod \"certified-operators-v6rm4\" (UID: \"a091f260-9672-424c-9924-baf3495f3ded\") " pod="openshift-marketplace/certified-operators-v6rm4"
Sep 30 11:06:50 crc kubenswrapper[4730]: I0930 11:06:50.907331 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zwxhz\" (UniqueName: \"kubernetes.io/projected/a091f260-9672-424c-9924-baf3495f3ded-kube-api-access-zwxhz\") pod \"certified-operators-v6rm4\" (UID: \"a091f260-9672-424c-9924-baf3495f3ded\") " pod="openshift-marketplace/certified-operators-v6rm4"
Sep 30 11:06:50 crc kubenswrapper[4730]: I0930 11:06:50.907890 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a091f260-9672-424c-9924-baf3495f3ded-utilities\") pod \"certified-operators-v6rm4\" (UID: \"a091f260-9672-424c-9924-baf3495f3ded\") " pod="openshift-marketplace/certified-operators-v6rm4"
Sep 30 11:06:50 crc kubenswrapper[4730]: I0930 11:06:50.908055 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a091f260-9672-424c-9924-baf3495f3ded-catalog-content\") pod \"certified-operators-v6rm4\" (UID: \"a091f260-9672-424c-9924-baf3495f3ded\") " pod="openshift-marketplace/certified-operators-v6rm4"
Sep 30 11:06:50 crc kubenswrapper[4730]: I0930 11:06:50.929188 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zwxhz\" (UniqueName: \"kubernetes.io/projected/a091f260-9672-424c-9924-baf3495f3ded-kube-api-access-zwxhz\") pod \"certified-operators-v6rm4\" (UID: \"a091f260-9672-424c-9924-baf3495f3ded\") " pod="openshift-marketplace/certified-operators-v6rm4"
Sep 30 11:06:51 crc kubenswrapper[4730]: I0930 11:06:51.055009 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-v6rm4"
Sep 30 11:06:51 crc kubenswrapper[4730]: I0930 11:06:51.617463 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-v6rm4"]
Sep 30 11:06:52 crc kubenswrapper[4730]: I0930 11:06:52.117547 4730 generic.go:334] "Generic (PLEG): container finished" podID="a091f260-9672-424c-9924-baf3495f3ded" containerID="8f48f5e35a23f140363f53a23948eee4519651f3f076796e60512f105a3bd888" exitCode=0
Sep 30 11:06:52 crc kubenswrapper[4730]: I0930 11:06:52.117671 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-v6rm4" event={"ID":"a091f260-9672-424c-9924-baf3495f3ded","Type":"ContainerDied","Data":"8f48f5e35a23f140363f53a23948eee4519651f3f076796e60512f105a3bd888"}
Sep 30 11:06:52 crc kubenswrapper[4730]: I0930 11:06:52.120966 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-v6rm4" event={"ID":"a091f260-9672-424c-9924-baf3495f3ded","Type":"ContainerStarted","Data":"55acea15ec34127e47caf113e6154b0af458e4cd023fe2277bd0af096e771afa"}
Sep 30 11:06:52 crc kubenswrapper[4730]: I0930 11:06:52.121169 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-w2796"]
Sep 30 11:06:52 crc kubenswrapper[4730]: I0930 11:06:52.120596 4730 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Sep 30 11:06:52 crc kubenswrapper[4730]: I0930 11:06:52.127149 4730 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openshift-marketplace/community-operators-w2796" Sep 30 11:06:52 crc kubenswrapper[4730]: I0930 11:06:52.132247 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/debee974-cb8b-4db6-bd4e-620b03832053-catalog-content\") pod \"community-operators-w2796\" (UID: \"debee974-cb8b-4db6-bd4e-620b03832053\") " pod="openshift-marketplace/community-operators-w2796" Sep 30 11:06:52 crc kubenswrapper[4730]: I0930 11:06:52.132410 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f2g4r\" (UniqueName: \"kubernetes.io/projected/debee974-cb8b-4db6-bd4e-620b03832053-kube-api-access-f2g4r\") pod \"community-operators-w2796\" (UID: \"debee974-cb8b-4db6-bd4e-620b03832053\") " pod="openshift-marketplace/community-operators-w2796" Sep 30 11:06:52 crc kubenswrapper[4730]: I0930 11:06:52.132589 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/debee974-cb8b-4db6-bd4e-620b03832053-utilities\") pod \"community-operators-w2796\" (UID: \"debee974-cb8b-4db6-bd4e-620b03832053\") " pod="openshift-marketplace/community-operators-w2796" Sep 30 11:06:52 crc kubenswrapper[4730]: I0930 11:06:52.142028 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-w2796"] Sep 30 11:06:52 crc kubenswrapper[4730]: I0930 11:06:52.234756 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/debee974-cb8b-4db6-bd4e-620b03832053-catalog-content\") pod \"community-operators-w2796\" (UID: \"debee974-cb8b-4db6-bd4e-620b03832053\") " pod="openshift-marketplace/community-operators-w2796" Sep 30 11:06:52 crc kubenswrapper[4730]: I0930 11:06:52.234900 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f2g4r\" (UniqueName: \"kubernetes.io/projected/debee974-cb8b-4db6-bd4e-620b03832053-kube-api-access-f2g4r\") pod \"community-operators-w2796\" (UID: \"debee974-cb8b-4db6-bd4e-620b03832053\") " pod="openshift-marketplace/community-operators-w2796" Sep 30 11:06:52 crc kubenswrapper[4730]: I0930 11:06:52.235056 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/debee974-cb8b-4db6-bd4e-620b03832053-utilities\") pod \"community-operators-w2796\" (UID: \"debee974-cb8b-4db6-bd4e-620b03832053\") " pod="openshift-marketplace/community-operators-w2796" Sep 30 11:06:52 crc kubenswrapper[4730]: I0930 11:06:52.235717 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/debee974-cb8b-4db6-bd4e-620b03832053-utilities\") pod \"community-operators-w2796\" (UID: \"debee974-cb8b-4db6-bd4e-620b03832053\") " pod="openshift-marketplace/community-operators-w2796" Sep 30 11:06:52 crc kubenswrapper[4730]: I0930 11:06:52.235987 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/debee974-cb8b-4db6-bd4e-620b03832053-catalog-content\") pod \"community-operators-w2796\" (UID: \"debee974-cb8b-4db6-bd4e-620b03832053\") " pod="openshift-marketplace/community-operators-w2796" Sep 30 11:06:52 crc kubenswrapper[4730]: I0930 11:06:52.652412 4730 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-f2g4r\" (UniqueName: \"kubernetes.io/projected/debee974-cb8b-4db6-bd4e-620b03832053-kube-api-access-f2g4r\") pod \"community-operators-w2796\" (UID: \"debee974-cb8b-4db6-bd4e-620b03832053\") " pod="openshift-marketplace/community-operators-w2796" Sep 30 11:06:52 crc kubenswrapper[4730]: I0930 11:06:52.765946 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-w2796" Sep 30 11:06:53 crc kubenswrapper[4730]: I0930 11:06:53.195263 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-w2796"] Sep 30 11:06:53 crc kubenswrapper[4730]: W0930 11:06:53.196045 4730 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddebee974_cb8b_4db6_bd4e_620b03832053.slice/crio-28ccc005b6938d0f5310c7c605e203b61445cc650bc46bb24debe739afd614d6 WatchSource:0}: Error finding container 28ccc005b6938d0f5310c7c605e203b61445cc650bc46bb24debe739afd614d6: Status 404 returned error can't find the container with id 28ccc005b6938d0f5310c7c605e203b61445cc650bc46bb24debe739afd614d6 Sep 30 11:06:54 crc kubenswrapper[4730]: I0930 11:06:54.150012 4730 generic.go:334] "Generic (PLEG): container finished" podID="debee974-cb8b-4db6-bd4e-620b03832053" containerID="7fd09e5a505c014315b791df8da6dfe2ec42497b4afb3d948ba0b83062882b03" exitCode=0 Sep 30 11:06:54 crc kubenswrapper[4730]: I0930 11:06:54.150102 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-w2796" event={"ID":"debee974-cb8b-4db6-bd4e-620b03832053","Type":"ContainerDied","Data":"7fd09e5a505c014315b791df8da6dfe2ec42497b4afb3d948ba0b83062882b03"} Sep 30 11:06:54 crc kubenswrapper[4730]: I0930 11:06:54.150294 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-w2796" event={"ID":"debee974-cb8b-4db6-bd4e-620b03832053","Type":"ContainerStarted","Data":"28ccc005b6938d0f5310c7c605e203b61445cc650bc46bb24debe739afd614d6"} Sep 30 11:06:54 crc kubenswrapper[4730]: I0930 11:06:54.153649 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-v6rm4" event={"ID":"a091f260-9672-424c-9924-baf3495f3ded","Type":"ContainerStarted","Data":"c0b00b2492b84c8cdd77e2eb77ab533be52757ba9f31768e035a945cb1171473"} Sep 30 11:06:55 crc kubenswrapper[4730]: I0930 11:06:55.165066 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-w2796" event={"ID":"debee974-cb8b-4db6-bd4e-620b03832053","Type":"ContainerStarted","Data":"046694504eb01656962804ac4e59c4a4776ead8f2dee48b0bdf6d4f2b2ab313a"} Sep 30 11:06:55 crc kubenswrapper[4730]: I0930 11:06:55.168681 4730 generic.go:334] "Generic (PLEG): container finished" podID="a091f260-9672-424c-9924-baf3495f3ded" containerID="c0b00b2492b84c8cdd77e2eb77ab533be52757ba9f31768e035a945cb1171473" exitCode=0 Sep 30 11:06:55 crc kubenswrapper[4730]: I0930 11:06:55.168720 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-v6rm4" event={"ID":"a091f260-9672-424c-9924-baf3495f3ded","Type":"ContainerDied","Data":"c0b00b2492b84c8cdd77e2eb77ab533be52757ba9f31768e035a945cb1171473"} Sep 30 11:06:56 crc kubenswrapper[4730]: I0930 11:06:56.182146 4730 generic.go:334] "Generic (PLEG): container finished" podID="debee974-cb8b-4db6-bd4e-620b03832053" 
containerID="046694504eb01656962804ac4e59c4a4776ead8f2dee48b0bdf6d4f2b2ab313a" exitCode=0 Sep 30 11:06:56 crc kubenswrapper[4730]: I0930 11:06:56.182262 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-w2796" event={"ID":"debee974-cb8b-4db6-bd4e-620b03832053","Type":"ContainerDied","Data":"046694504eb01656962804ac4e59c4a4776ead8f2dee48b0bdf6d4f2b2ab313a"} Sep 30 11:06:56 crc kubenswrapper[4730]: I0930 11:06:56.190464 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-v6rm4" event={"ID":"a091f260-9672-424c-9924-baf3495f3ded","Type":"ContainerStarted","Data":"f22a4d139b3188a80c992e71f628dabe973929683984f09f221629740cf978e0"} Sep 30 11:06:56 crc kubenswrapper[4730]: I0930 11:06:56.243944 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-v6rm4" podStartSLOduration=2.739295231 podStartE2EDuration="6.243922415s" podCreationTimestamp="2025-09-30 11:06:50 +0000 UTC" firstStartedPulling="2025-09-30 11:06:52.120180125 +0000 UTC m=+4656.453440158" lastFinishedPulling="2025-09-30 11:06:55.624807359 +0000 UTC m=+4659.958067342" observedRunningTime="2025-09-30 11:06:56.232587457 +0000 UTC m=+4660.565847480" watchObservedRunningTime="2025-09-30 11:06:56.243922415 +0000 UTC m=+4660.577182418" Sep 30 11:06:58 crc kubenswrapper[4730]: I0930 11:06:58.213356 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-w2796" event={"ID":"debee974-cb8b-4db6-bd4e-620b03832053","Type":"ContainerStarted","Data":"99e569687d4bac0bee3f5ed59bc5f2961ec835e78927bfe1eface209fc420f69"} Sep 30 11:06:58 crc kubenswrapper[4730]: I0930 11:06:58.237764 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-w2796" podStartSLOduration=3.239746488 podStartE2EDuration="6.237745028s" podCreationTimestamp="2025-09-30 11:06:52 +0000 UTC" firstStartedPulling="2025-09-30 11:06:54.15254253 +0000 UTC m=+4658.485802533" lastFinishedPulling="2025-09-30 11:06:57.15054108 +0000 UTC m=+4661.483801073" observedRunningTime="2025-09-30 11:06:58.228792373 +0000 UTC m=+4662.562052376" watchObservedRunningTime="2025-09-30 11:06:58.237745028 +0000 UTC m=+4662.571005021" Sep 30 11:07:01 crc kubenswrapper[4730]: I0930 11:07:01.055940 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-v6rm4" Sep 30 11:07:01 crc kubenswrapper[4730]: I0930 11:07:01.056280 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-v6rm4" Sep 30 11:07:01 crc kubenswrapper[4730]: I0930 11:07:01.151599 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-v6rm4" Sep 30 11:07:01 crc kubenswrapper[4730]: I0930 11:07:01.327063 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-v6rm4" Sep 30 11:07:02 crc kubenswrapper[4730]: I0930 11:07:02.292188 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-v6rm4"] Sep 30 11:07:02 crc kubenswrapper[4730]: I0930 11:07:02.767244 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-w2796" Sep 30 11:07:02 crc kubenswrapper[4730]: I0930 11:07:02.767680 4730 kubelet.go:2542] "SyncLoop (probe)" 
probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-w2796" Sep 30 11:07:03 crc kubenswrapper[4730]: I0930 11:07:03.281085 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-v6rm4" podUID="a091f260-9672-424c-9924-baf3495f3ded" containerName="registry-server" containerID="cri-o://f22a4d139b3188a80c992e71f628dabe973929683984f09f221629740cf978e0" gracePeriod=2 Sep 30 11:07:03 crc kubenswrapper[4730]: I0930 11:07:03.597954 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-w2796" Sep 30 11:07:03 crc kubenswrapper[4730]: I0930 11:07:03.918782 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-v6rm4" Sep 30 11:07:04 crc kubenswrapper[4730]: I0930 11:07:04.034808 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a091f260-9672-424c-9924-baf3495f3ded-utilities\") pod \"a091f260-9672-424c-9924-baf3495f3ded\" (UID: \"a091f260-9672-424c-9924-baf3495f3ded\") " Sep 30 11:07:04 crc kubenswrapper[4730]: I0930 11:07:04.035213 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a091f260-9672-424c-9924-baf3495f3ded-catalog-content\") pod \"a091f260-9672-424c-9924-baf3495f3ded\" (UID: \"a091f260-9672-424c-9924-baf3495f3ded\") " Sep 30 11:07:04 crc kubenswrapper[4730]: I0930 11:07:04.035426 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zwxhz\" (UniqueName: \"kubernetes.io/projected/a091f260-9672-424c-9924-baf3495f3ded-kube-api-access-zwxhz\") pod \"a091f260-9672-424c-9924-baf3495f3ded\" (UID: \"a091f260-9672-424c-9924-baf3495f3ded\") " Sep 30 11:07:04 crc kubenswrapper[4730]: I0930 11:07:04.035732 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a091f260-9672-424c-9924-baf3495f3ded-utilities" (OuterVolumeSpecName: "utilities") pod "a091f260-9672-424c-9924-baf3495f3ded" (UID: "a091f260-9672-424c-9924-baf3495f3ded"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 11:07:04 crc kubenswrapper[4730]: I0930 11:07:04.036128 4730 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a091f260-9672-424c-9924-baf3495f3ded-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 11:07:04 crc kubenswrapper[4730]: I0930 11:07:04.041197 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a091f260-9672-424c-9924-baf3495f3ded-kube-api-access-zwxhz" (OuterVolumeSpecName: "kube-api-access-zwxhz") pod "a091f260-9672-424c-9924-baf3495f3ded" (UID: "a091f260-9672-424c-9924-baf3495f3ded"). InnerVolumeSpecName "kube-api-access-zwxhz". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 11:07:04 crc kubenswrapper[4730]: I0930 11:07:04.079201 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a091f260-9672-424c-9924-baf3495f3ded-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a091f260-9672-424c-9924-baf3495f3ded" (UID: "a091f260-9672-424c-9924-baf3495f3ded"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 11:07:04 crc kubenswrapper[4730]: I0930 11:07:04.137711 4730 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a091f260-9672-424c-9924-baf3495f3ded-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 11:07:04 crc kubenswrapper[4730]: I0930 11:07:04.137742 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zwxhz\" (UniqueName: \"kubernetes.io/projected/a091f260-9672-424c-9924-baf3495f3ded-kube-api-access-zwxhz\") on node \"crc\" DevicePath \"\"" Sep 30 11:07:04 crc kubenswrapper[4730]: I0930 11:07:04.294925 4730 generic.go:334] "Generic (PLEG): container finished" podID="a091f260-9672-424c-9924-baf3495f3ded" containerID="f22a4d139b3188a80c992e71f628dabe973929683984f09f221629740cf978e0" exitCode=0 Sep 30 11:07:04 crc kubenswrapper[4730]: I0930 11:07:04.296375 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-v6rm4" Sep 30 11:07:04 crc kubenswrapper[4730]: I0930 11:07:04.298307 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-v6rm4" event={"ID":"a091f260-9672-424c-9924-baf3495f3ded","Type":"ContainerDied","Data":"f22a4d139b3188a80c992e71f628dabe973929683984f09f221629740cf978e0"} Sep 30 11:07:04 crc kubenswrapper[4730]: I0930 11:07:04.298355 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-v6rm4" event={"ID":"a091f260-9672-424c-9924-baf3495f3ded","Type":"ContainerDied","Data":"55acea15ec34127e47caf113e6154b0af458e4cd023fe2277bd0af096e771afa"} Sep 30 11:07:04 crc kubenswrapper[4730]: I0930 11:07:04.298378 4730 scope.go:117] "RemoveContainer" containerID="f22a4d139b3188a80c992e71f628dabe973929683984f09f221629740cf978e0" Sep 30 11:07:04 crc kubenswrapper[4730]: I0930 11:07:04.338278 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-v6rm4"] Sep 30 11:07:04 crc kubenswrapper[4730]: I0930 11:07:04.339730 4730 scope.go:117] "RemoveContainer" containerID="c0b00b2492b84c8cdd77e2eb77ab533be52757ba9f31768e035a945cb1171473" Sep 30 11:07:04 crc kubenswrapper[4730]: I0930 11:07:04.346293 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-v6rm4"] Sep 30 11:07:04 crc kubenswrapper[4730]: I0930 11:07:04.363247 4730 scope.go:117] "RemoveContainer" containerID="8f48f5e35a23f140363f53a23948eee4519651f3f076796e60512f105a3bd888" Sep 30 11:07:04 crc kubenswrapper[4730]: I0930 11:07:04.368995 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-w2796" Sep 30 11:07:04 crc kubenswrapper[4730]: I0930 11:07:04.405677 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a091f260-9672-424c-9924-baf3495f3ded" path="/var/lib/kubelet/pods/a091f260-9672-424c-9924-baf3495f3ded/volumes" Sep 30 11:07:04 crc kubenswrapper[4730]: I0930 11:07:04.413328 4730 scope.go:117] "RemoveContainer" containerID="f22a4d139b3188a80c992e71f628dabe973929683984f09f221629740cf978e0" Sep 30 11:07:04 crc kubenswrapper[4730]: E0930 11:07:04.415023 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f22a4d139b3188a80c992e71f628dabe973929683984f09f221629740cf978e0\": container with ID starting with 
f22a4d139b3188a80c992e71f628dabe973929683984f09f221629740cf978e0 not found: ID does not exist" containerID="f22a4d139b3188a80c992e71f628dabe973929683984f09f221629740cf978e0" Sep 30 11:07:04 crc kubenswrapper[4730]: I0930 11:07:04.415058 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f22a4d139b3188a80c992e71f628dabe973929683984f09f221629740cf978e0"} err="failed to get container status \"f22a4d139b3188a80c992e71f628dabe973929683984f09f221629740cf978e0\": rpc error: code = NotFound desc = could not find container \"f22a4d139b3188a80c992e71f628dabe973929683984f09f221629740cf978e0\": container with ID starting with f22a4d139b3188a80c992e71f628dabe973929683984f09f221629740cf978e0 not found: ID does not exist" Sep 30 11:07:04 crc kubenswrapper[4730]: I0930 11:07:04.415084 4730 scope.go:117] "RemoveContainer" containerID="c0b00b2492b84c8cdd77e2eb77ab533be52757ba9f31768e035a945cb1171473" Sep 30 11:07:04 crc kubenswrapper[4730]: E0930 11:07:04.415323 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c0b00b2492b84c8cdd77e2eb77ab533be52757ba9f31768e035a945cb1171473\": container with ID starting with c0b00b2492b84c8cdd77e2eb77ab533be52757ba9f31768e035a945cb1171473 not found: ID does not exist" containerID="c0b00b2492b84c8cdd77e2eb77ab533be52757ba9f31768e035a945cb1171473" Sep 30 11:07:04 crc kubenswrapper[4730]: I0930 11:07:04.415347 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c0b00b2492b84c8cdd77e2eb77ab533be52757ba9f31768e035a945cb1171473"} err="failed to get container status \"c0b00b2492b84c8cdd77e2eb77ab533be52757ba9f31768e035a945cb1171473\": rpc error: code = NotFound desc = could not find container \"c0b00b2492b84c8cdd77e2eb77ab533be52757ba9f31768e035a945cb1171473\": container with ID starting with c0b00b2492b84c8cdd77e2eb77ab533be52757ba9f31768e035a945cb1171473 not found: ID does not exist" Sep 30 11:07:04 crc kubenswrapper[4730]: I0930 11:07:04.415361 4730 scope.go:117] "RemoveContainer" containerID="8f48f5e35a23f140363f53a23948eee4519651f3f076796e60512f105a3bd888" Sep 30 11:07:04 crc kubenswrapper[4730]: E0930 11:07:04.415590 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8f48f5e35a23f140363f53a23948eee4519651f3f076796e60512f105a3bd888\": container with ID starting with 8f48f5e35a23f140363f53a23948eee4519651f3f076796e60512f105a3bd888 not found: ID does not exist" containerID="8f48f5e35a23f140363f53a23948eee4519651f3f076796e60512f105a3bd888" Sep 30 11:07:04 crc kubenswrapper[4730]: I0930 11:07:04.415606 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8f48f5e35a23f140363f53a23948eee4519651f3f076796e60512f105a3bd888"} err="failed to get container status \"8f48f5e35a23f140363f53a23948eee4519651f3f076796e60512f105a3bd888\": rpc error: code = NotFound desc = could not find container \"8f48f5e35a23f140363f53a23948eee4519651f3f076796e60512f105a3bd888\": container with ID starting with 8f48f5e35a23f140363f53a23948eee4519651f3f076796e60512f105a3bd888 not found: ID does not exist" Sep 30 11:07:05 crc kubenswrapper[4730]: I0930 11:07:05.292304 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-w2796"] Sep 30 11:07:06 crc kubenswrapper[4730]: I0930 11:07:06.314545 4730 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openshift-marketplace/community-operators-w2796" podUID="debee974-cb8b-4db6-bd4e-620b03832053" containerName="registry-server" containerID="cri-o://99e569687d4bac0bee3f5ed59bc5f2961ec835e78927bfe1eface209fc420f69" gracePeriod=2 Sep 30 11:07:06 crc kubenswrapper[4730]: I0930 11:07:06.868556 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-w2796" Sep 30 11:07:06 crc kubenswrapper[4730]: I0930 11:07:06.900229 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/debee974-cb8b-4db6-bd4e-620b03832053-utilities\") pod \"debee974-cb8b-4db6-bd4e-620b03832053\" (UID: \"debee974-cb8b-4db6-bd4e-620b03832053\") " Sep 30 11:07:06 crc kubenswrapper[4730]: I0930 11:07:06.900495 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/debee974-cb8b-4db6-bd4e-620b03832053-catalog-content\") pod \"debee974-cb8b-4db6-bd4e-620b03832053\" (UID: \"debee974-cb8b-4db6-bd4e-620b03832053\") " Sep 30 11:07:06 crc kubenswrapper[4730]: I0930 11:07:06.900568 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f2g4r\" (UniqueName: \"kubernetes.io/projected/debee974-cb8b-4db6-bd4e-620b03832053-kube-api-access-f2g4r\") pod \"debee974-cb8b-4db6-bd4e-620b03832053\" (UID: \"debee974-cb8b-4db6-bd4e-620b03832053\") " Sep 30 11:07:06 crc kubenswrapper[4730]: I0930 11:07:06.903372 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/debee974-cb8b-4db6-bd4e-620b03832053-utilities" (OuterVolumeSpecName: "utilities") pod "debee974-cb8b-4db6-bd4e-620b03832053" (UID: "debee974-cb8b-4db6-bd4e-620b03832053"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 11:07:06 crc kubenswrapper[4730]: I0930 11:07:06.913345 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/debee974-cb8b-4db6-bd4e-620b03832053-kube-api-access-f2g4r" (OuterVolumeSpecName: "kube-api-access-f2g4r") pod "debee974-cb8b-4db6-bd4e-620b03832053" (UID: "debee974-cb8b-4db6-bd4e-620b03832053"). InnerVolumeSpecName "kube-api-access-f2g4r". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 11:07:06 crc kubenswrapper[4730]: I0930 11:07:06.968034 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/debee974-cb8b-4db6-bd4e-620b03832053-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "debee974-cb8b-4db6-bd4e-620b03832053" (UID: "debee974-cb8b-4db6-bd4e-620b03832053"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 11:07:07 crc kubenswrapper[4730]: I0930 11:07:07.003106 4730 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/debee974-cb8b-4db6-bd4e-620b03832053-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 11:07:07 crc kubenswrapper[4730]: I0930 11:07:07.003138 4730 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/debee974-cb8b-4db6-bd4e-620b03832053-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 11:07:07 crc kubenswrapper[4730]: I0930 11:07:07.003150 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f2g4r\" (UniqueName: \"kubernetes.io/projected/debee974-cb8b-4db6-bd4e-620b03832053-kube-api-access-f2g4r\") on node \"crc\" DevicePath \"\"" Sep 30 11:07:07 crc kubenswrapper[4730]: I0930 11:07:07.329006 4730 generic.go:334] "Generic (PLEG): container finished" podID="debee974-cb8b-4db6-bd4e-620b03832053" containerID="99e569687d4bac0bee3f5ed59bc5f2961ec835e78927bfe1eface209fc420f69" exitCode=0 Sep 30 11:07:07 crc kubenswrapper[4730]: I0930 11:07:07.329102 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-w2796" event={"ID":"debee974-cb8b-4db6-bd4e-620b03832053","Type":"ContainerDied","Data":"99e569687d4bac0bee3f5ed59bc5f2961ec835e78927bfe1eface209fc420f69"} Sep 30 11:07:07 crc kubenswrapper[4730]: I0930 11:07:07.329368 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-w2796" event={"ID":"debee974-cb8b-4db6-bd4e-620b03832053","Type":"ContainerDied","Data":"28ccc005b6938d0f5310c7c605e203b61445cc650bc46bb24debe739afd614d6"} Sep 30 11:07:07 crc kubenswrapper[4730]: I0930 11:07:07.329270 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-w2796" Sep 30 11:07:07 crc kubenswrapper[4730]: I0930 11:07:07.329394 4730 scope.go:117] "RemoveContainer" containerID="99e569687d4bac0bee3f5ed59bc5f2961ec835e78927bfe1eface209fc420f69" Sep 30 11:07:07 crc kubenswrapper[4730]: I0930 11:07:07.351964 4730 scope.go:117] "RemoveContainer" containerID="046694504eb01656962804ac4e59c4a4776ead8f2dee48b0bdf6d4f2b2ab313a" Sep 30 11:07:07 crc kubenswrapper[4730]: I0930 11:07:07.391091 4730 scope.go:117] "RemoveContainer" containerID="7fd09e5a505c014315b791df8da6dfe2ec42497b4afb3d948ba0b83062882b03" Sep 30 11:07:07 crc kubenswrapper[4730]: I0930 11:07:07.394947 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-w2796"] Sep 30 11:07:07 crc kubenswrapper[4730]: I0930 11:07:07.405330 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-w2796"] Sep 30 11:07:07 crc kubenswrapper[4730]: I0930 11:07:07.452208 4730 scope.go:117] "RemoveContainer" containerID="99e569687d4bac0bee3f5ed59bc5f2961ec835e78927bfe1eface209fc420f69" Sep 30 11:07:07 crc kubenswrapper[4730]: E0930 11:07:07.452950 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"99e569687d4bac0bee3f5ed59bc5f2961ec835e78927bfe1eface209fc420f69\": container with ID starting with 99e569687d4bac0bee3f5ed59bc5f2961ec835e78927bfe1eface209fc420f69 not found: ID does not exist" containerID="99e569687d4bac0bee3f5ed59bc5f2961ec835e78927bfe1eface209fc420f69" Sep 30 11:07:07 crc kubenswrapper[4730]: I0930 11:07:07.453011 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"99e569687d4bac0bee3f5ed59bc5f2961ec835e78927bfe1eface209fc420f69"} err="failed to get container status \"99e569687d4bac0bee3f5ed59bc5f2961ec835e78927bfe1eface209fc420f69\": rpc error: code = NotFound desc = could not find container \"99e569687d4bac0bee3f5ed59bc5f2961ec835e78927bfe1eface209fc420f69\": container with ID starting with 99e569687d4bac0bee3f5ed59bc5f2961ec835e78927bfe1eface209fc420f69 not found: ID does not exist" Sep 30 11:07:07 crc kubenswrapper[4730]: I0930 11:07:07.453044 4730 scope.go:117] "RemoveContainer" containerID="046694504eb01656962804ac4e59c4a4776ead8f2dee48b0bdf6d4f2b2ab313a" Sep 30 11:07:07 crc kubenswrapper[4730]: E0930 11:07:07.453524 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"046694504eb01656962804ac4e59c4a4776ead8f2dee48b0bdf6d4f2b2ab313a\": container with ID starting with 046694504eb01656962804ac4e59c4a4776ead8f2dee48b0bdf6d4f2b2ab313a not found: ID does not exist" containerID="046694504eb01656962804ac4e59c4a4776ead8f2dee48b0bdf6d4f2b2ab313a" Sep 30 11:07:07 crc kubenswrapper[4730]: I0930 11:07:07.453581 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"046694504eb01656962804ac4e59c4a4776ead8f2dee48b0bdf6d4f2b2ab313a"} err="failed to get container status \"046694504eb01656962804ac4e59c4a4776ead8f2dee48b0bdf6d4f2b2ab313a\": rpc error: code = NotFound desc = could not find container \"046694504eb01656962804ac4e59c4a4776ead8f2dee48b0bdf6d4f2b2ab313a\": container with ID starting with 046694504eb01656962804ac4e59c4a4776ead8f2dee48b0bdf6d4f2b2ab313a not found: ID does not exist" Sep 30 11:07:07 crc kubenswrapper[4730]: I0930 11:07:07.453671 4730 scope.go:117] "RemoveContainer" 
containerID="7fd09e5a505c014315b791df8da6dfe2ec42497b4afb3d948ba0b83062882b03" Sep 30 11:07:07 crc kubenswrapper[4730]: E0930 11:07:07.454898 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7fd09e5a505c014315b791df8da6dfe2ec42497b4afb3d948ba0b83062882b03\": container with ID starting with 7fd09e5a505c014315b791df8da6dfe2ec42497b4afb3d948ba0b83062882b03 not found: ID does not exist" containerID="7fd09e5a505c014315b791df8da6dfe2ec42497b4afb3d948ba0b83062882b03" Sep 30 11:07:07 crc kubenswrapper[4730]: I0930 11:07:07.454924 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7fd09e5a505c014315b791df8da6dfe2ec42497b4afb3d948ba0b83062882b03"} err="failed to get container status \"7fd09e5a505c014315b791df8da6dfe2ec42497b4afb3d948ba0b83062882b03\": rpc error: code = NotFound desc = could not find container \"7fd09e5a505c014315b791df8da6dfe2ec42497b4afb3d948ba0b83062882b03\": container with ID starting with 7fd09e5a505c014315b791df8da6dfe2ec42497b4afb3d948ba0b83062882b03 not found: ID does not exist" Sep 30 11:07:08 crc kubenswrapper[4730]: I0930 11:07:08.393654 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="debee974-cb8b-4db6-bd4e-620b03832053" path="/var/lib/kubelet/pods/debee974-cb8b-4db6-bd4e-620b03832053/volumes" Sep 30 11:07:32 crc kubenswrapper[4730]: I0930 11:07:32.336425 4730 patch_prober.go:28] interesting pod/machine-config-daemon-d4zf9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 11:07:32 crc kubenswrapper[4730]: I0930 11:07:32.337001 4730 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 11:08:02 crc kubenswrapper[4730]: I0930 11:08:02.337604 4730 patch_prober.go:28] interesting pod/machine-config-daemon-d4zf9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 11:08:02 crc kubenswrapper[4730]: I0930 11:08:02.338185 4730 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 11:08:23 crc kubenswrapper[4730]: E0930 11:08:23.901908 4730 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/system.slice/rpm-ostreed.service\": RecentStats: unable to find data in memory cache]" Sep 30 11:08:32 crc kubenswrapper[4730]: I0930 11:08:32.337296 4730 patch_prober.go:28] interesting pod/machine-config-daemon-d4zf9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 11:08:32 crc kubenswrapper[4730]: I0930 
11:08:32.337867 4730 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 11:08:32 crc kubenswrapper[4730]: I0930 11:08:32.337906 4730 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" Sep 30 11:08:32 crc kubenswrapper[4730]: I0930 11:08:32.340289 4730 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"97fd2743ac185e4a3c882330a4b61abe84b019d15703849796b6cc4b51ad5cbd"} pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 11:08:32 crc kubenswrapper[4730]: I0930 11:08:32.340345 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerName="machine-config-daemon" containerID="cri-o://97fd2743ac185e4a3c882330a4b61abe84b019d15703849796b6cc4b51ad5cbd" gracePeriod=600 Sep 30 11:08:33 crc kubenswrapper[4730]: I0930 11:08:33.203892 4730 generic.go:334] "Generic (PLEG): container finished" podID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerID="97fd2743ac185e4a3c882330a4b61abe84b019d15703849796b6cc4b51ad5cbd" exitCode=0 Sep 30 11:08:33 crc kubenswrapper[4730]: I0930 11:08:33.203973 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" event={"ID":"95bd4436-8399-478d-9552-c9ba5ae8f327","Type":"ContainerDied","Data":"97fd2743ac185e4a3c882330a4b61abe84b019d15703849796b6cc4b51ad5cbd"} Sep 30 11:08:33 crc kubenswrapper[4730]: I0930 11:08:33.204450 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" event={"ID":"95bd4436-8399-478d-9552-c9ba5ae8f327","Type":"ContainerStarted","Data":"92c7793e7eedc6dd3bf5ea2862a618ee0a43e85f4db43416aa6b6a8fcb84a542"} Sep 30 11:08:33 crc kubenswrapper[4730]: I0930 11:08:33.204470 4730 scope.go:117] "RemoveContainer" containerID="beb2fbe30416770e92ddfdbfb247323ade28dbc77e27c3fc6850c673f83687b1" Sep 30 11:08:46 crc kubenswrapper[4730]: I0930 11:08:46.760677 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-tsw2f"] Sep 30 11:08:46 crc kubenswrapper[4730]: E0930 11:08:46.762138 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a091f260-9672-424c-9924-baf3495f3ded" containerName="extract-utilities" Sep 30 11:08:46 crc kubenswrapper[4730]: I0930 11:08:46.762159 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="a091f260-9672-424c-9924-baf3495f3ded" containerName="extract-utilities" Sep 30 11:08:46 crc kubenswrapper[4730]: E0930 11:08:46.762187 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a091f260-9672-424c-9924-baf3495f3ded" containerName="registry-server" Sep 30 11:08:46 crc kubenswrapper[4730]: I0930 11:08:46.762196 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="a091f260-9672-424c-9924-baf3495f3ded" containerName="registry-server" Sep 30 11:08:46 crc kubenswrapper[4730]: E0930 11:08:46.762211 4730 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="debee974-cb8b-4db6-bd4e-620b03832053" containerName="extract-utilities" Sep 30 11:08:46 crc kubenswrapper[4730]: I0930 11:08:46.762217 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="debee974-cb8b-4db6-bd4e-620b03832053" containerName="extract-utilities" Sep 30 11:08:46 crc kubenswrapper[4730]: E0930 11:08:46.762239 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a091f260-9672-424c-9924-baf3495f3ded" containerName="extract-content" Sep 30 11:08:46 crc kubenswrapper[4730]: I0930 11:08:46.762246 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="a091f260-9672-424c-9924-baf3495f3ded" containerName="extract-content" Sep 30 11:08:46 crc kubenswrapper[4730]: E0930 11:08:46.762258 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="debee974-cb8b-4db6-bd4e-620b03832053" containerName="extract-content" Sep 30 11:08:46 crc kubenswrapper[4730]: I0930 11:08:46.762264 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="debee974-cb8b-4db6-bd4e-620b03832053" containerName="extract-content" Sep 30 11:08:46 crc kubenswrapper[4730]: E0930 11:08:46.762276 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="debee974-cb8b-4db6-bd4e-620b03832053" containerName="registry-server" Sep 30 11:08:46 crc kubenswrapper[4730]: I0930 11:08:46.762283 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="debee974-cb8b-4db6-bd4e-620b03832053" containerName="registry-server" Sep 30 11:08:46 crc kubenswrapper[4730]: I0930 11:08:46.762505 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="a091f260-9672-424c-9924-baf3495f3ded" containerName="registry-server" Sep 30 11:08:46 crc kubenswrapper[4730]: I0930 11:08:46.762528 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="debee974-cb8b-4db6-bd4e-620b03832053" containerName="registry-server" Sep 30 11:08:46 crc kubenswrapper[4730]: I0930 11:08:46.764120 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tsw2f" Sep 30 11:08:46 crc kubenswrapper[4730]: I0930 11:08:46.784159 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-tsw2f"] Sep 30 11:08:46 crc kubenswrapper[4730]: I0930 11:08:46.875061 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dda30bcf-f24e-4e61-a068-8361e5eb33f1-utilities\") pod \"redhat-marketplace-tsw2f\" (UID: \"dda30bcf-f24e-4e61-a068-8361e5eb33f1\") " pod="openshift-marketplace/redhat-marketplace-tsw2f" Sep 30 11:08:46 crc kubenswrapper[4730]: I0930 11:08:46.875143 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dda30bcf-f24e-4e61-a068-8361e5eb33f1-catalog-content\") pod \"redhat-marketplace-tsw2f\" (UID: \"dda30bcf-f24e-4e61-a068-8361e5eb33f1\") " pod="openshift-marketplace/redhat-marketplace-tsw2f" Sep 30 11:08:46 crc kubenswrapper[4730]: I0930 11:08:46.875404 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cl9s5\" (UniqueName: \"kubernetes.io/projected/dda30bcf-f24e-4e61-a068-8361e5eb33f1-kube-api-access-cl9s5\") pod \"redhat-marketplace-tsw2f\" (UID: \"dda30bcf-f24e-4e61-a068-8361e5eb33f1\") " pod="openshift-marketplace/redhat-marketplace-tsw2f" Sep 30 11:08:46 crc kubenswrapper[4730]: I0930 11:08:46.977727 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dda30bcf-f24e-4e61-a068-8361e5eb33f1-utilities\") pod \"redhat-marketplace-tsw2f\" (UID: \"dda30bcf-f24e-4e61-a068-8361e5eb33f1\") " pod="openshift-marketplace/redhat-marketplace-tsw2f" Sep 30 11:08:46 crc kubenswrapper[4730]: I0930 11:08:46.977826 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dda30bcf-f24e-4e61-a068-8361e5eb33f1-catalog-content\") pod \"redhat-marketplace-tsw2f\" (UID: \"dda30bcf-f24e-4e61-a068-8361e5eb33f1\") " pod="openshift-marketplace/redhat-marketplace-tsw2f" Sep 30 11:08:46 crc kubenswrapper[4730]: I0930 11:08:46.977938 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cl9s5\" (UniqueName: \"kubernetes.io/projected/dda30bcf-f24e-4e61-a068-8361e5eb33f1-kube-api-access-cl9s5\") pod \"redhat-marketplace-tsw2f\" (UID: \"dda30bcf-f24e-4e61-a068-8361e5eb33f1\") " pod="openshift-marketplace/redhat-marketplace-tsw2f" Sep 30 11:08:46 crc kubenswrapper[4730]: I0930 11:08:46.978179 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dda30bcf-f24e-4e61-a068-8361e5eb33f1-utilities\") pod \"redhat-marketplace-tsw2f\" (UID: \"dda30bcf-f24e-4e61-a068-8361e5eb33f1\") " pod="openshift-marketplace/redhat-marketplace-tsw2f" Sep 30 11:08:46 crc kubenswrapper[4730]: I0930 11:08:46.978248 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dda30bcf-f24e-4e61-a068-8361e5eb33f1-catalog-content\") pod \"redhat-marketplace-tsw2f\" (UID: \"dda30bcf-f24e-4e61-a068-8361e5eb33f1\") " pod="openshift-marketplace/redhat-marketplace-tsw2f" Sep 30 11:08:46 crc kubenswrapper[4730]: I0930 11:08:46.998259 4730 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-cl9s5\" (UniqueName: \"kubernetes.io/projected/dda30bcf-f24e-4e61-a068-8361e5eb33f1-kube-api-access-cl9s5\") pod \"redhat-marketplace-tsw2f\" (UID: \"dda30bcf-f24e-4e61-a068-8361e5eb33f1\") " pod="openshift-marketplace/redhat-marketplace-tsw2f" Sep 30 11:08:47 crc kubenswrapper[4730]: I0930 11:08:47.088964 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tsw2f" Sep 30 11:08:47 crc kubenswrapper[4730]: I0930 11:08:47.540303 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-tsw2f"] Sep 30 11:08:48 crc kubenswrapper[4730]: I0930 11:08:48.355955 4730 generic.go:334] "Generic (PLEG): container finished" podID="dda30bcf-f24e-4e61-a068-8361e5eb33f1" containerID="0b36124f7c7710a49366d0aa94f82cef945952ccf47e692055feac06c911fb2e" exitCode=0 Sep 30 11:08:48 crc kubenswrapper[4730]: I0930 11:08:48.356033 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tsw2f" event={"ID":"dda30bcf-f24e-4e61-a068-8361e5eb33f1","Type":"ContainerDied","Data":"0b36124f7c7710a49366d0aa94f82cef945952ccf47e692055feac06c911fb2e"} Sep 30 11:08:48 crc kubenswrapper[4730]: I0930 11:08:48.356296 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tsw2f" event={"ID":"dda30bcf-f24e-4e61-a068-8361e5eb33f1","Type":"ContainerStarted","Data":"5ede5519c78b1d315ad31d3f02bcd205a224fcb1408a205a936e5db223368766"} Sep 30 11:08:50 crc kubenswrapper[4730]: I0930 11:08:50.379696 4730 generic.go:334] "Generic (PLEG): container finished" podID="dda30bcf-f24e-4e61-a068-8361e5eb33f1" containerID="b486c659bba9f162a80b3c84a0ada8c317046d5078637cc7c9a4b8f0044b68d3" exitCode=0 Sep 30 11:08:50 crc kubenswrapper[4730]: I0930 11:08:50.395100 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tsw2f" event={"ID":"dda30bcf-f24e-4e61-a068-8361e5eb33f1","Type":"ContainerDied","Data":"b486c659bba9f162a80b3c84a0ada8c317046d5078637cc7c9a4b8f0044b68d3"} Sep 30 11:08:51 crc kubenswrapper[4730]: I0930 11:08:51.392543 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tsw2f" event={"ID":"dda30bcf-f24e-4e61-a068-8361e5eb33f1","Type":"ContainerStarted","Data":"b04ee0725e1b8ecede01214e0e9e9b87fc5eeebc202317ef7fcfea65f1ec35b4"} Sep 30 11:08:51 crc kubenswrapper[4730]: I0930 11:08:51.427311 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-tsw2f" podStartSLOduration=2.78484185 podStartE2EDuration="5.427285654s" podCreationTimestamp="2025-09-30 11:08:46 +0000 UTC" firstStartedPulling="2025-09-30 11:08:48.358737751 +0000 UTC m=+4772.691997764" lastFinishedPulling="2025-09-30 11:08:51.001181575 +0000 UTC m=+4775.334441568" observedRunningTime="2025-09-30 11:08:51.422816427 +0000 UTC m=+4775.756076450" watchObservedRunningTime="2025-09-30 11:08:51.427285654 +0000 UTC m=+4775.760545657" Sep 30 11:08:57 crc kubenswrapper[4730]: I0930 11:08:57.090093 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-tsw2f" Sep 30 11:08:57 crc kubenswrapper[4730]: I0930 11:08:57.090391 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-tsw2f" Sep 30 11:08:57 crc kubenswrapper[4730]: I0930 11:08:57.146707 4730 kubelet.go:2542] "SyncLoop 
(probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-tsw2f" Sep 30 11:08:57 crc kubenswrapper[4730]: I0930 11:08:57.524024 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-tsw2f" Sep 30 11:08:58 crc kubenswrapper[4730]: I0930 11:08:58.744654 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-tsw2f"] Sep 30 11:08:59 crc kubenswrapper[4730]: I0930 11:08:59.477884 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-tsw2f" podUID="dda30bcf-f24e-4e61-a068-8361e5eb33f1" containerName="registry-server" containerID="cri-o://b04ee0725e1b8ecede01214e0e9e9b87fc5eeebc202317ef7fcfea65f1ec35b4" gracePeriod=2 Sep 30 11:09:00 crc kubenswrapper[4730]: I0930 11:09:00.064941 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tsw2f" Sep 30 11:09:00 crc kubenswrapper[4730]: I0930 11:09:00.147183 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dda30bcf-f24e-4e61-a068-8361e5eb33f1-utilities\") pod \"dda30bcf-f24e-4e61-a068-8361e5eb33f1\" (UID: \"dda30bcf-f24e-4e61-a068-8361e5eb33f1\") " Sep 30 11:09:00 crc kubenswrapper[4730]: I0930 11:09:00.147307 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dda30bcf-f24e-4e61-a068-8361e5eb33f1-catalog-content\") pod \"dda30bcf-f24e-4e61-a068-8361e5eb33f1\" (UID: \"dda30bcf-f24e-4e61-a068-8361e5eb33f1\") " Sep 30 11:09:00 crc kubenswrapper[4730]: I0930 11:09:00.147414 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cl9s5\" (UniqueName: \"kubernetes.io/projected/dda30bcf-f24e-4e61-a068-8361e5eb33f1-kube-api-access-cl9s5\") pod \"dda30bcf-f24e-4e61-a068-8361e5eb33f1\" (UID: \"dda30bcf-f24e-4e61-a068-8361e5eb33f1\") " Sep 30 11:09:00 crc kubenswrapper[4730]: I0930 11:09:00.148646 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dda30bcf-f24e-4e61-a068-8361e5eb33f1-utilities" (OuterVolumeSpecName: "utilities") pod "dda30bcf-f24e-4e61-a068-8361e5eb33f1" (UID: "dda30bcf-f24e-4e61-a068-8361e5eb33f1"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 11:09:00 crc kubenswrapper[4730]: I0930 11:09:00.159854 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dda30bcf-f24e-4e61-a068-8361e5eb33f1-kube-api-access-cl9s5" (OuterVolumeSpecName: "kube-api-access-cl9s5") pod "dda30bcf-f24e-4e61-a068-8361e5eb33f1" (UID: "dda30bcf-f24e-4e61-a068-8361e5eb33f1"). InnerVolumeSpecName "kube-api-access-cl9s5". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 11:09:00 crc kubenswrapper[4730]: I0930 11:09:00.247801 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dda30bcf-f24e-4e61-a068-8361e5eb33f1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "dda30bcf-f24e-4e61-a068-8361e5eb33f1" (UID: "dda30bcf-f24e-4e61-a068-8361e5eb33f1"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 11:09:00 crc kubenswrapper[4730]: I0930 11:09:00.250041 4730 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dda30bcf-f24e-4e61-a068-8361e5eb33f1-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 11:09:00 crc kubenswrapper[4730]: I0930 11:09:00.250064 4730 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dda30bcf-f24e-4e61-a068-8361e5eb33f1-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 11:09:00 crc kubenswrapper[4730]: I0930 11:09:00.250076 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cl9s5\" (UniqueName: \"kubernetes.io/projected/dda30bcf-f24e-4e61-a068-8361e5eb33f1-kube-api-access-cl9s5\") on node \"crc\" DevicePath \"\"" Sep 30 11:09:00 crc kubenswrapper[4730]: I0930 11:09:00.488818 4730 generic.go:334] "Generic (PLEG): container finished" podID="dda30bcf-f24e-4e61-a068-8361e5eb33f1" containerID="b04ee0725e1b8ecede01214e0e9e9b87fc5eeebc202317ef7fcfea65f1ec35b4" exitCode=0 Sep 30 11:09:00 crc kubenswrapper[4730]: I0930 11:09:00.488864 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tsw2f" event={"ID":"dda30bcf-f24e-4e61-a068-8361e5eb33f1","Type":"ContainerDied","Data":"b04ee0725e1b8ecede01214e0e9e9b87fc5eeebc202317ef7fcfea65f1ec35b4"} Sep 30 11:09:00 crc kubenswrapper[4730]: I0930 11:09:00.488883 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tsw2f" Sep 30 11:09:00 crc kubenswrapper[4730]: I0930 11:09:00.488892 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tsw2f" event={"ID":"dda30bcf-f24e-4e61-a068-8361e5eb33f1","Type":"ContainerDied","Data":"5ede5519c78b1d315ad31d3f02bcd205a224fcb1408a205a936e5db223368766"} Sep 30 11:09:00 crc kubenswrapper[4730]: I0930 11:09:00.488915 4730 scope.go:117] "RemoveContainer" containerID="b04ee0725e1b8ecede01214e0e9e9b87fc5eeebc202317ef7fcfea65f1ec35b4" Sep 30 11:09:00 crc kubenswrapper[4730]: I0930 11:09:00.508476 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-tsw2f"] Sep 30 11:09:00 crc kubenswrapper[4730]: I0930 11:09:00.509226 4730 scope.go:117] "RemoveContainer" containerID="b486c659bba9f162a80b3c84a0ada8c317046d5078637cc7c9a4b8f0044b68d3" Sep 30 11:09:00 crc kubenswrapper[4730]: I0930 11:09:00.516937 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-tsw2f"] Sep 30 11:09:00 crc kubenswrapper[4730]: I0930 11:09:00.535450 4730 scope.go:117] "RemoveContainer" containerID="0b36124f7c7710a49366d0aa94f82cef945952ccf47e692055feac06c911fb2e" Sep 30 11:09:00 crc kubenswrapper[4730]: I0930 11:09:00.588385 4730 scope.go:117] "RemoveContainer" containerID="b04ee0725e1b8ecede01214e0e9e9b87fc5eeebc202317ef7fcfea65f1ec35b4" Sep 30 11:09:00 crc kubenswrapper[4730]: E0930 11:09:00.588945 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b04ee0725e1b8ecede01214e0e9e9b87fc5eeebc202317ef7fcfea65f1ec35b4\": container with ID starting with b04ee0725e1b8ecede01214e0e9e9b87fc5eeebc202317ef7fcfea65f1ec35b4 not found: ID does not exist" containerID="b04ee0725e1b8ecede01214e0e9e9b87fc5eeebc202317ef7fcfea65f1ec35b4" Sep 30 11:09:00 crc kubenswrapper[4730]: I0930 11:09:00.588979 4730 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b04ee0725e1b8ecede01214e0e9e9b87fc5eeebc202317ef7fcfea65f1ec35b4"} err="failed to get container status \"b04ee0725e1b8ecede01214e0e9e9b87fc5eeebc202317ef7fcfea65f1ec35b4\": rpc error: code = NotFound desc = could not find container \"b04ee0725e1b8ecede01214e0e9e9b87fc5eeebc202317ef7fcfea65f1ec35b4\": container with ID starting with b04ee0725e1b8ecede01214e0e9e9b87fc5eeebc202317ef7fcfea65f1ec35b4 not found: ID does not exist" Sep 30 11:09:00 crc kubenswrapper[4730]: I0930 11:09:00.589002 4730 scope.go:117] "RemoveContainer" containerID="b486c659bba9f162a80b3c84a0ada8c317046d5078637cc7c9a4b8f0044b68d3" Sep 30 11:09:00 crc kubenswrapper[4730]: E0930 11:09:00.589254 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b486c659bba9f162a80b3c84a0ada8c317046d5078637cc7c9a4b8f0044b68d3\": container with ID starting with b486c659bba9f162a80b3c84a0ada8c317046d5078637cc7c9a4b8f0044b68d3 not found: ID does not exist" containerID="b486c659bba9f162a80b3c84a0ada8c317046d5078637cc7c9a4b8f0044b68d3" Sep 30 11:09:00 crc kubenswrapper[4730]: I0930 11:09:00.589290 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b486c659bba9f162a80b3c84a0ada8c317046d5078637cc7c9a4b8f0044b68d3"} err="failed to get container status \"b486c659bba9f162a80b3c84a0ada8c317046d5078637cc7c9a4b8f0044b68d3\": rpc error: code = NotFound desc = could not find container \"b486c659bba9f162a80b3c84a0ada8c317046d5078637cc7c9a4b8f0044b68d3\": container with ID starting with b486c659bba9f162a80b3c84a0ada8c317046d5078637cc7c9a4b8f0044b68d3 not found: ID does not exist" Sep 30 11:09:00 crc kubenswrapper[4730]: I0930 11:09:00.589305 4730 scope.go:117] "RemoveContainer" containerID="0b36124f7c7710a49366d0aa94f82cef945952ccf47e692055feac06c911fb2e" Sep 30 11:09:00 crc kubenswrapper[4730]: E0930 11:09:00.589487 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0b36124f7c7710a49366d0aa94f82cef945952ccf47e692055feac06c911fb2e\": container with ID starting with 0b36124f7c7710a49366d0aa94f82cef945952ccf47e692055feac06c911fb2e not found: ID does not exist" containerID="0b36124f7c7710a49366d0aa94f82cef945952ccf47e692055feac06c911fb2e" Sep 30 11:09:00 crc kubenswrapper[4730]: I0930 11:09:00.589507 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0b36124f7c7710a49366d0aa94f82cef945952ccf47e692055feac06c911fb2e"} err="failed to get container status \"0b36124f7c7710a49366d0aa94f82cef945952ccf47e692055feac06c911fb2e\": rpc error: code = NotFound desc = could not find container \"0b36124f7c7710a49366d0aa94f82cef945952ccf47e692055feac06c911fb2e\": container with ID starting with 0b36124f7c7710a49366d0aa94f82cef945952ccf47e692055feac06c911fb2e not found: ID does not exist" Sep 30 11:09:02 crc kubenswrapper[4730]: I0930 11:09:02.398068 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dda30bcf-f24e-4e61-a068-8361e5eb33f1" path="/var/lib/kubelet/pods/dda30bcf-f24e-4e61-a068-8361e5eb33f1/volumes" Sep 30 11:10:32 crc kubenswrapper[4730]: I0930 11:10:32.336803 4730 patch_prober.go:28] interesting pod/machine-config-daemon-d4zf9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 
127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 11:10:32 crc kubenswrapper[4730]: I0930 11:10:32.337649 4730 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 11:11:02 crc kubenswrapper[4730]: I0930 11:11:02.336651 4730 patch_prober.go:28] interesting pod/machine-config-daemon-d4zf9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 11:11:02 crc kubenswrapper[4730]: I0930 11:11:02.337207 4730 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 11:11:32 crc kubenswrapper[4730]: I0930 11:11:32.337246 4730 patch_prober.go:28] interesting pod/machine-config-daemon-d4zf9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 11:11:32 crc kubenswrapper[4730]: I0930 11:11:32.337868 4730 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 11:11:32 crc kubenswrapper[4730]: I0930 11:11:32.337924 4730 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" Sep 30 11:11:32 crc kubenswrapper[4730]: I0930 11:11:32.338812 4730 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"92c7793e7eedc6dd3bf5ea2862a618ee0a43e85f4db43416aa6b6a8fcb84a542"} pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 11:11:32 crc kubenswrapper[4730]: I0930 11:11:32.338884 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerName="machine-config-daemon" containerID="cri-o://92c7793e7eedc6dd3bf5ea2862a618ee0a43e85f4db43416aa6b6a8fcb84a542" gracePeriod=600 Sep 30 11:11:32 crc kubenswrapper[4730]: E0930 11:11:32.465998 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 11:11:33 crc kubenswrapper[4730]: I0930 11:11:33.198149 4730 
generic.go:334] "Generic (PLEG): container finished" podID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerID="92c7793e7eedc6dd3bf5ea2862a618ee0a43e85f4db43416aa6b6a8fcb84a542" exitCode=0 Sep 30 11:11:33 crc kubenswrapper[4730]: I0930 11:11:33.198222 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" event={"ID":"95bd4436-8399-478d-9552-c9ba5ae8f327","Type":"ContainerDied","Data":"92c7793e7eedc6dd3bf5ea2862a618ee0a43e85f4db43416aa6b6a8fcb84a542"} Sep 30 11:11:33 crc kubenswrapper[4730]: I0930 11:11:33.198416 4730 scope.go:117] "RemoveContainer" containerID="97fd2743ac185e4a3c882330a4b61abe84b019d15703849796b6cc4b51ad5cbd" Sep 30 11:11:33 crc kubenswrapper[4730]: I0930 11:11:33.199248 4730 scope.go:117] "RemoveContainer" containerID="92c7793e7eedc6dd3bf5ea2862a618ee0a43e85f4db43416aa6b6a8fcb84a542" Sep 30 11:11:33 crc kubenswrapper[4730]: E0930 11:11:33.199541 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 11:11:43 crc kubenswrapper[4730]: I0930 11:11:43.381141 4730 scope.go:117] "RemoveContainer" containerID="92c7793e7eedc6dd3bf5ea2862a618ee0a43e85f4db43416aa6b6a8fcb84a542" Sep 30 11:11:43 crc kubenswrapper[4730]: E0930 11:11:43.382006 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 11:11:55 crc kubenswrapper[4730]: I0930 11:11:55.381555 4730 scope.go:117] "RemoveContainer" containerID="92c7793e7eedc6dd3bf5ea2862a618ee0a43e85f4db43416aa6b6a8fcb84a542" Sep 30 11:11:55 crc kubenswrapper[4730]: E0930 11:11:55.382769 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 11:12:08 crc kubenswrapper[4730]: I0930 11:12:08.384147 4730 scope.go:117] "RemoveContainer" containerID="92c7793e7eedc6dd3bf5ea2862a618ee0a43e85f4db43416aa6b6a8fcb84a542" Sep 30 11:12:08 crc kubenswrapper[4730]: E0930 11:12:08.385325 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 11:12:23 crc kubenswrapper[4730]: I0930 11:12:23.380580 4730 scope.go:117] "RemoveContainer" 
containerID="92c7793e7eedc6dd3bf5ea2862a618ee0a43e85f4db43416aa6b6a8fcb84a542" Sep 30 11:12:23 crc kubenswrapper[4730]: E0930 11:12:23.381403 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 11:12:36 crc kubenswrapper[4730]: I0930 11:12:36.391720 4730 scope.go:117] "RemoveContainer" containerID="92c7793e7eedc6dd3bf5ea2862a618ee0a43e85f4db43416aa6b6a8fcb84a542" Sep 30 11:12:36 crc kubenswrapper[4730]: E0930 11:12:36.392792 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 11:12:51 crc kubenswrapper[4730]: I0930 11:12:51.381809 4730 scope.go:117] "RemoveContainer" containerID="92c7793e7eedc6dd3bf5ea2862a618ee0a43e85f4db43416aa6b6a8fcb84a542" Sep 30 11:12:51 crc kubenswrapper[4730]: E0930 11:12:51.383407 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 11:13:02 crc kubenswrapper[4730]: I0930 11:13:02.381419 4730 scope.go:117] "RemoveContainer" containerID="92c7793e7eedc6dd3bf5ea2862a618ee0a43e85f4db43416aa6b6a8fcb84a542" Sep 30 11:13:02 crc kubenswrapper[4730]: E0930 11:13:02.382514 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 11:13:17 crc kubenswrapper[4730]: I0930 11:13:17.382100 4730 scope.go:117] "RemoveContainer" containerID="92c7793e7eedc6dd3bf5ea2862a618ee0a43e85f4db43416aa6b6a8fcb84a542" Sep 30 11:13:17 crc kubenswrapper[4730]: E0930 11:13:17.383171 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 11:13:28 crc kubenswrapper[4730]: I0930 11:13:28.381160 4730 scope.go:117] "RemoveContainer" containerID="92c7793e7eedc6dd3bf5ea2862a618ee0a43e85f4db43416aa6b6a8fcb84a542" Sep 30 11:13:28 crc kubenswrapper[4730]: E0930 11:13:28.382060 4730 pod_workers.go:1301] "Error syncing pod, 
skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 11:13:42 crc kubenswrapper[4730]: I0930 11:13:42.383092 4730 scope.go:117] "RemoveContainer" containerID="92c7793e7eedc6dd3bf5ea2862a618ee0a43e85f4db43416aa6b6a8fcb84a542" Sep 30 11:13:42 crc kubenswrapper[4730]: E0930 11:13:42.383955 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 11:13:57 crc kubenswrapper[4730]: I0930 11:13:57.383711 4730 scope.go:117] "RemoveContainer" containerID="92c7793e7eedc6dd3bf5ea2862a618ee0a43e85f4db43416aa6b6a8fcb84a542" Sep 30 11:13:57 crc kubenswrapper[4730]: E0930 11:13:57.384748 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 11:14:11 crc kubenswrapper[4730]: I0930 11:14:11.381541 4730 scope.go:117] "RemoveContainer" containerID="92c7793e7eedc6dd3bf5ea2862a618ee0a43e85f4db43416aa6b6a8fcb84a542" Sep 30 11:14:11 crc kubenswrapper[4730]: E0930 11:14:11.382741 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 11:14:23 crc kubenswrapper[4730]: I0930 11:14:23.381277 4730 scope.go:117] "RemoveContainer" containerID="92c7793e7eedc6dd3bf5ea2862a618ee0a43e85f4db43416aa6b6a8fcb84a542" Sep 30 11:14:23 crc kubenswrapper[4730]: E0930 11:14:23.382714 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 11:14:34 crc kubenswrapper[4730]: I0930 11:14:34.383176 4730 scope.go:117] "RemoveContainer" containerID="92c7793e7eedc6dd3bf5ea2862a618ee0a43e85f4db43416aa6b6a8fcb84a542" Sep 30 11:14:34 crc kubenswrapper[4730]: E0930 11:14:34.384043 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 11:14:48 crc kubenswrapper[4730]: E0930 11:14:48.370565 4730 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.104:41538->38.102.83.104:41011: write tcp 38.102.83.104:41538->38.102.83.104:41011: write: broken pipe Sep 30 11:14:49 crc kubenswrapper[4730]: I0930 11:14:49.381361 4730 scope.go:117] "RemoveContainer" containerID="92c7793e7eedc6dd3bf5ea2862a618ee0a43e85f4db43416aa6b6a8fcb84a542" Sep 30 11:14:49 crc kubenswrapper[4730]: E0930 11:14:49.381943 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 11:14:55 crc kubenswrapper[4730]: E0930 11:14:55.847931 4730 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.104:41778->38.102.83.104:41011: write tcp 38.102.83.104:41778->38.102.83.104:41011: write: broken pipe Sep 30 11:15:00 crc kubenswrapper[4730]: I0930 11:15:00.176908 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29320515-92p6j"] Sep 30 11:15:00 crc kubenswrapper[4730]: E0930 11:15:00.177815 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dda30bcf-f24e-4e61-a068-8361e5eb33f1" containerName="extract-utilities" Sep 30 11:15:00 crc kubenswrapper[4730]: I0930 11:15:00.177832 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="dda30bcf-f24e-4e61-a068-8361e5eb33f1" containerName="extract-utilities" Sep 30 11:15:00 crc kubenswrapper[4730]: E0930 11:15:00.177844 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dda30bcf-f24e-4e61-a068-8361e5eb33f1" containerName="registry-server" Sep 30 11:15:00 crc kubenswrapper[4730]: I0930 11:15:00.177852 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="dda30bcf-f24e-4e61-a068-8361e5eb33f1" containerName="registry-server" Sep 30 11:15:00 crc kubenswrapper[4730]: E0930 11:15:00.177875 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dda30bcf-f24e-4e61-a068-8361e5eb33f1" containerName="extract-content" Sep 30 11:15:00 crc kubenswrapper[4730]: I0930 11:15:00.177884 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="dda30bcf-f24e-4e61-a068-8361e5eb33f1" containerName="extract-content" Sep 30 11:15:00 crc kubenswrapper[4730]: I0930 11:15:00.178162 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="dda30bcf-f24e-4e61-a068-8361e5eb33f1" containerName="registry-server" Sep 30 11:15:00 crc kubenswrapper[4730]: I0930 11:15:00.178883 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29320515-92p6j" Sep 30 11:15:00 crc kubenswrapper[4730]: I0930 11:15:00.182086 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Sep 30 11:15:00 crc kubenswrapper[4730]: I0930 11:15:00.182293 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Sep 30 11:15:00 crc kubenswrapper[4730]: I0930 11:15:00.187639 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29320515-92p6j"] Sep 30 11:15:00 crc kubenswrapper[4730]: I0930 11:15:00.248006 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2p59k\" (UniqueName: \"kubernetes.io/projected/4cb154ce-c68d-4b5f-bd7b-3bf3c00e0761-kube-api-access-2p59k\") pod \"collect-profiles-29320515-92p6j\" (UID: \"4cb154ce-c68d-4b5f-bd7b-3bf3c00e0761\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320515-92p6j" Sep 30 11:15:00 crc kubenswrapper[4730]: I0930 11:15:00.248411 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4cb154ce-c68d-4b5f-bd7b-3bf3c00e0761-secret-volume\") pod \"collect-profiles-29320515-92p6j\" (UID: \"4cb154ce-c68d-4b5f-bd7b-3bf3c00e0761\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320515-92p6j" Sep 30 11:15:00 crc kubenswrapper[4730]: I0930 11:15:00.248443 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4cb154ce-c68d-4b5f-bd7b-3bf3c00e0761-config-volume\") pod \"collect-profiles-29320515-92p6j\" (UID: \"4cb154ce-c68d-4b5f-bd7b-3bf3c00e0761\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320515-92p6j" Sep 30 11:15:00 crc kubenswrapper[4730]: I0930 11:15:00.349848 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4cb154ce-c68d-4b5f-bd7b-3bf3c00e0761-secret-volume\") pod \"collect-profiles-29320515-92p6j\" (UID: \"4cb154ce-c68d-4b5f-bd7b-3bf3c00e0761\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320515-92p6j" Sep 30 11:15:00 crc kubenswrapper[4730]: I0930 11:15:00.349897 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4cb154ce-c68d-4b5f-bd7b-3bf3c00e0761-config-volume\") pod \"collect-profiles-29320515-92p6j\" (UID: \"4cb154ce-c68d-4b5f-bd7b-3bf3c00e0761\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320515-92p6j" Sep 30 11:15:00 crc kubenswrapper[4730]: I0930 11:15:00.349965 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2p59k\" (UniqueName: \"kubernetes.io/projected/4cb154ce-c68d-4b5f-bd7b-3bf3c00e0761-kube-api-access-2p59k\") pod \"collect-profiles-29320515-92p6j\" (UID: \"4cb154ce-c68d-4b5f-bd7b-3bf3c00e0761\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320515-92p6j" Sep 30 11:15:00 crc kubenswrapper[4730]: I0930 11:15:00.350920 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4cb154ce-c68d-4b5f-bd7b-3bf3c00e0761-config-volume\") pod 
\"collect-profiles-29320515-92p6j\" (UID: \"4cb154ce-c68d-4b5f-bd7b-3bf3c00e0761\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320515-92p6j" Sep 30 11:15:00 crc kubenswrapper[4730]: I0930 11:15:00.357693 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4cb154ce-c68d-4b5f-bd7b-3bf3c00e0761-secret-volume\") pod \"collect-profiles-29320515-92p6j\" (UID: \"4cb154ce-c68d-4b5f-bd7b-3bf3c00e0761\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320515-92p6j" Sep 30 11:15:00 crc kubenswrapper[4730]: I0930 11:15:00.374447 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2p59k\" (UniqueName: \"kubernetes.io/projected/4cb154ce-c68d-4b5f-bd7b-3bf3c00e0761-kube-api-access-2p59k\") pod \"collect-profiles-29320515-92p6j\" (UID: \"4cb154ce-c68d-4b5f-bd7b-3bf3c00e0761\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320515-92p6j" Sep 30 11:15:00 crc kubenswrapper[4730]: I0930 11:15:00.519813 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29320515-92p6j" Sep 30 11:15:01 crc kubenswrapper[4730]: I0930 11:15:01.018263 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29320515-92p6j"] Sep 30 11:15:01 crc kubenswrapper[4730]: I0930 11:15:01.645004 4730 generic.go:334] "Generic (PLEG): container finished" podID="4cb154ce-c68d-4b5f-bd7b-3bf3c00e0761" containerID="9f66e59fd2523f96b2881705fc66797ade0cdc83e8275b1751e4242b218c6de3" exitCode=0 Sep 30 11:15:01 crc kubenswrapper[4730]: I0930 11:15:01.645218 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29320515-92p6j" event={"ID":"4cb154ce-c68d-4b5f-bd7b-3bf3c00e0761","Type":"ContainerDied","Data":"9f66e59fd2523f96b2881705fc66797ade0cdc83e8275b1751e4242b218c6de3"} Sep 30 11:15:01 crc kubenswrapper[4730]: I0930 11:15:01.645242 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29320515-92p6j" event={"ID":"4cb154ce-c68d-4b5f-bd7b-3bf3c00e0761","Type":"ContainerStarted","Data":"978805cbd1d6e93b954bf6bac8bb1b5bba22a8513b0b3072e53a5cd94689afe4"} Sep 30 11:15:02 crc kubenswrapper[4730]: I0930 11:15:02.380525 4730 scope.go:117] "RemoveContainer" containerID="92c7793e7eedc6dd3bf5ea2862a618ee0a43e85f4db43416aa6b6a8fcb84a542" Sep 30 11:15:02 crc kubenswrapper[4730]: E0930 11:15:02.380892 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 11:15:03 crc kubenswrapper[4730]: I0930 11:15:03.069428 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29320515-92p6j" Sep 30 11:15:03 crc kubenswrapper[4730]: I0930 11:15:03.233543 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4cb154ce-c68d-4b5f-bd7b-3bf3c00e0761-secret-volume\") pod \"4cb154ce-c68d-4b5f-bd7b-3bf3c00e0761\" (UID: \"4cb154ce-c68d-4b5f-bd7b-3bf3c00e0761\") " Sep 30 11:15:03 crc kubenswrapper[4730]: I0930 11:15:03.233864 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2p59k\" (UniqueName: \"kubernetes.io/projected/4cb154ce-c68d-4b5f-bd7b-3bf3c00e0761-kube-api-access-2p59k\") pod \"4cb154ce-c68d-4b5f-bd7b-3bf3c00e0761\" (UID: \"4cb154ce-c68d-4b5f-bd7b-3bf3c00e0761\") " Sep 30 11:15:03 crc kubenswrapper[4730]: I0930 11:15:03.233973 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4cb154ce-c68d-4b5f-bd7b-3bf3c00e0761-config-volume\") pod \"4cb154ce-c68d-4b5f-bd7b-3bf3c00e0761\" (UID: \"4cb154ce-c68d-4b5f-bd7b-3bf3c00e0761\") " Sep 30 11:15:03 crc kubenswrapper[4730]: I0930 11:15:03.234638 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4cb154ce-c68d-4b5f-bd7b-3bf3c00e0761-config-volume" (OuterVolumeSpecName: "config-volume") pod "4cb154ce-c68d-4b5f-bd7b-3bf3c00e0761" (UID: "4cb154ce-c68d-4b5f-bd7b-3bf3c00e0761"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 11:15:03 crc kubenswrapper[4730]: I0930 11:15:03.240055 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4cb154ce-c68d-4b5f-bd7b-3bf3c00e0761-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "4cb154ce-c68d-4b5f-bd7b-3bf3c00e0761" (UID: "4cb154ce-c68d-4b5f-bd7b-3bf3c00e0761"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 11:15:03 crc kubenswrapper[4730]: I0930 11:15:03.240944 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4cb154ce-c68d-4b5f-bd7b-3bf3c00e0761-kube-api-access-2p59k" (OuterVolumeSpecName: "kube-api-access-2p59k") pod "4cb154ce-c68d-4b5f-bd7b-3bf3c00e0761" (UID: "4cb154ce-c68d-4b5f-bd7b-3bf3c00e0761"). InnerVolumeSpecName "kube-api-access-2p59k". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 11:15:03 crc kubenswrapper[4730]: I0930 11:15:03.337089 4730 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4cb154ce-c68d-4b5f-bd7b-3bf3c00e0761-secret-volume\") on node \"crc\" DevicePath \"\"" Sep 30 11:15:03 crc kubenswrapper[4730]: I0930 11:15:03.337292 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2p59k\" (UniqueName: \"kubernetes.io/projected/4cb154ce-c68d-4b5f-bd7b-3bf3c00e0761-kube-api-access-2p59k\") on node \"crc\" DevicePath \"\"" Sep 30 11:15:03 crc kubenswrapper[4730]: I0930 11:15:03.337393 4730 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4cb154ce-c68d-4b5f-bd7b-3bf3c00e0761-config-volume\") on node \"crc\" DevicePath \"\"" Sep 30 11:15:03 crc kubenswrapper[4730]: I0930 11:15:03.662593 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29320515-92p6j" event={"ID":"4cb154ce-c68d-4b5f-bd7b-3bf3c00e0761","Type":"ContainerDied","Data":"978805cbd1d6e93b954bf6bac8bb1b5bba22a8513b0b3072e53a5cd94689afe4"} Sep 30 11:15:03 crc kubenswrapper[4730]: I0930 11:15:03.662665 4730 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="978805cbd1d6e93b954bf6bac8bb1b5bba22a8513b0b3072e53a5cd94689afe4" Sep 30 11:15:03 crc kubenswrapper[4730]: I0930 11:15:03.662695 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29320515-92p6j" Sep 30 11:15:03 crc kubenswrapper[4730]: I0930 11:15:03.930987 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-7ks6c"] Sep 30 11:15:03 crc kubenswrapper[4730]: E0930 11:15:03.931493 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4cb154ce-c68d-4b5f-bd7b-3bf3c00e0761" containerName="collect-profiles" Sep 30 11:15:03 crc kubenswrapper[4730]: I0930 11:15:03.931517 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="4cb154ce-c68d-4b5f-bd7b-3bf3c00e0761" containerName="collect-profiles" Sep 30 11:15:03 crc kubenswrapper[4730]: I0930 11:15:03.931807 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="4cb154ce-c68d-4b5f-bd7b-3bf3c00e0761" containerName="collect-profiles" Sep 30 11:15:03 crc kubenswrapper[4730]: I0930 11:15:03.935151 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-7ks6c" Sep 30 11:15:03 crc kubenswrapper[4730]: I0930 11:15:03.943342 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-7ks6c"] Sep 30 11:15:04 crc kubenswrapper[4730]: I0930 11:15:04.050502 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gtcjd\" (UniqueName: \"kubernetes.io/projected/2a822e96-1c06-4afa-9d9d-85a1bf11ac8b-kube-api-access-gtcjd\") pod \"redhat-operators-7ks6c\" (UID: \"2a822e96-1c06-4afa-9d9d-85a1bf11ac8b\") " pod="openshift-marketplace/redhat-operators-7ks6c" Sep 30 11:15:04 crc kubenswrapper[4730]: I0930 11:15:04.050807 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2a822e96-1c06-4afa-9d9d-85a1bf11ac8b-catalog-content\") pod \"redhat-operators-7ks6c\" (UID: \"2a822e96-1c06-4afa-9d9d-85a1bf11ac8b\") " pod="openshift-marketplace/redhat-operators-7ks6c" Sep 30 11:15:04 crc kubenswrapper[4730]: I0930 11:15:04.050924 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2a822e96-1c06-4afa-9d9d-85a1bf11ac8b-utilities\") pod \"redhat-operators-7ks6c\" (UID: \"2a822e96-1c06-4afa-9d9d-85a1bf11ac8b\") " pod="openshift-marketplace/redhat-operators-7ks6c" Sep 30 11:15:04 crc kubenswrapper[4730]: I0930 11:15:04.152646 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2a822e96-1c06-4afa-9d9d-85a1bf11ac8b-utilities\") pod \"redhat-operators-7ks6c\" (UID: \"2a822e96-1c06-4afa-9d9d-85a1bf11ac8b\") " pod="openshift-marketplace/redhat-operators-7ks6c" Sep 30 11:15:04 crc kubenswrapper[4730]: I0930 11:15:04.152910 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gtcjd\" (UniqueName: \"kubernetes.io/projected/2a822e96-1c06-4afa-9d9d-85a1bf11ac8b-kube-api-access-gtcjd\") pod \"redhat-operators-7ks6c\" (UID: \"2a822e96-1c06-4afa-9d9d-85a1bf11ac8b\") " pod="openshift-marketplace/redhat-operators-7ks6c" Sep 30 11:15:04 crc kubenswrapper[4730]: I0930 11:15:04.153087 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2a822e96-1c06-4afa-9d9d-85a1bf11ac8b-catalog-content\") pod \"redhat-operators-7ks6c\" (UID: \"2a822e96-1c06-4afa-9d9d-85a1bf11ac8b\") " pod="openshift-marketplace/redhat-operators-7ks6c" Sep 30 11:15:04 crc kubenswrapper[4730]: I0930 11:15:04.153320 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2a822e96-1c06-4afa-9d9d-85a1bf11ac8b-utilities\") pod \"redhat-operators-7ks6c\" (UID: \"2a822e96-1c06-4afa-9d9d-85a1bf11ac8b\") " pod="openshift-marketplace/redhat-operators-7ks6c" Sep 30 11:15:04 crc kubenswrapper[4730]: I0930 11:15:04.153600 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2a822e96-1c06-4afa-9d9d-85a1bf11ac8b-catalog-content\") pod \"redhat-operators-7ks6c\" (UID: \"2a822e96-1c06-4afa-9d9d-85a1bf11ac8b\") " pod="openshift-marketplace/redhat-operators-7ks6c" Sep 30 11:15:04 crc kubenswrapper[4730]: I0930 11:15:04.161376 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openshift-operator-lifecycle-manager/collect-profiles-29320470-zzt2k"] Sep 30 11:15:04 crc kubenswrapper[4730]: I0930 11:15:04.169771 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29320470-zzt2k"] Sep 30 11:15:04 crc kubenswrapper[4730]: I0930 11:15:04.174338 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gtcjd\" (UniqueName: \"kubernetes.io/projected/2a822e96-1c06-4afa-9d9d-85a1bf11ac8b-kube-api-access-gtcjd\") pod \"redhat-operators-7ks6c\" (UID: \"2a822e96-1c06-4afa-9d9d-85a1bf11ac8b\") " pod="openshift-marketplace/redhat-operators-7ks6c" Sep 30 11:15:04 crc kubenswrapper[4730]: I0930 11:15:04.259180 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-7ks6c" Sep 30 11:15:04 crc kubenswrapper[4730]: I0930 11:15:04.405501 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d920f51a-11f4-408a-a55c-2a3ee16173da" path="/var/lib/kubelet/pods/d920f51a-11f4-408a-a55c-2a3ee16173da/volumes" Sep 30 11:15:04 crc kubenswrapper[4730]: I0930 11:15:04.768126 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-7ks6c"] Sep 30 11:15:05 crc kubenswrapper[4730]: I0930 11:15:05.686888 4730 generic.go:334] "Generic (PLEG): container finished" podID="2a822e96-1c06-4afa-9d9d-85a1bf11ac8b" containerID="74badf240aa335da75eca14449aaa0ca88004a80c9f830acbaaaa5b7b4a0b9a4" exitCode=0 Sep 30 11:15:05 crc kubenswrapper[4730]: I0930 11:15:05.686994 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7ks6c" event={"ID":"2a822e96-1c06-4afa-9d9d-85a1bf11ac8b","Type":"ContainerDied","Data":"74badf240aa335da75eca14449aaa0ca88004a80c9f830acbaaaa5b7b4a0b9a4"} Sep 30 11:15:05 crc kubenswrapper[4730]: I0930 11:15:05.687254 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7ks6c" event={"ID":"2a822e96-1c06-4afa-9d9d-85a1bf11ac8b","Type":"ContainerStarted","Data":"1fc88efc2dbfca35fe41cd44ab619a5830fc15f903be73c7a676d12ac1df1b88"} Sep 30 11:15:05 crc kubenswrapper[4730]: I0930 11:15:05.689361 4730 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 30 11:15:06 crc kubenswrapper[4730]: I0930 11:15:06.702877 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7ks6c" event={"ID":"2a822e96-1c06-4afa-9d9d-85a1bf11ac8b","Type":"ContainerStarted","Data":"1bb3e6ace4041138f67325d837f4b95d6610ed25de1de85b0313b964af01101f"} Sep 30 11:15:10 crc kubenswrapper[4730]: I0930 11:15:10.758140 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7ks6c" event={"ID":"2a822e96-1c06-4afa-9d9d-85a1bf11ac8b","Type":"ContainerDied","Data":"1bb3e6ace4041138f67325d837f4b95d6610ed25de1de85b0313b964af01101f"} Sep 30 11:15:10 crc kubenswrapper[4730]: I0930 11:15:10.758091 4730 generic.go:334] "Generic (PLEG): container finished" podID="2a822e96-1c06-4afa-9d9d-85a1bf11ac8b" containerID="1bb3e6ace4041138f67325d837f4b95d6610ed25de1de85b0313b964af01101f" exitCode=0 Sep 30 11:15:12 crc kubenswrapper[4730]: I0930 11:15:12.781551 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7ks6c" event={"ID":"2a822e96-1c06-4afa-9d9d-85a1bf11ac8b","Type":"ContainerStarted","Data":"f2807abcbfb9671ebd7b7e31fc84aed5ffa656fa53c10fcc7ff30fc65d2e400a"} 
Sep 30 11:15:12 crc kubenswrapper[4730]: I0930 11:15:12.802381 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-7ks6c" podStartSLOduration=3.905568816 podStartE2EDuration="9.802363863s" podCreationTimestamp="2025-09-30 11:15:03 +0000 UTC" firstStartedPulling="2025-09-30 11:15:05.689035122 +0000 UTC m=+5150.022295115" lastFinishedPulling="2025-09-30 11:15:11.585830149 +0000 UTC m=+5155.919090162" observedRunningTime="2025-09-30 11:15:12.79920858 +0000 UTC m=+5157.132468583" watchObservedRunningTime="2025-09-30 11:15:12.802363863 +0000 UTC m=+5157.135623866"
Sep 30 11:15:14 crc kubenswrapper[4730]: I0930 11:15:14.259433 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-7ks6c"
Sep 30 11:15:14 crc kubenswrapper[4730]: I0930 11:15:14.259945 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-7ks6c"
Sep 30 11:15:15 crc kubenswrapper[4730]: I0930 11:15:15.311666 4730 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-7ks6c" podUID="2a822e96-1c06-4afa-9d9d-85a1bf11ac8b" containerName="registry-server" probeResult="failure" output=<
Sep 30 11:15:15 crc kubenswrapper[4730]: timeout: failed to connect service ":50051" within 1s
Sep 30 11:15:15 crc kubenswrapper[4730]: >
Sep 30 11:15:16 crc kubenswrapper[4730]: I0930 11:15:16.393904 4730 scope.go:117] "RemoveContainer" containerID="92c7793e7eedc6dd3bf5ea2862a618ee0a43e85f4db43416aa6b6a8fcb84a542"
Sep 30 11:15:16 crc kubenswrapper[4730]: E0930 11:15:16.394504 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327"
Sep 30 11:15:24 crc kubenswrapper[4730]: I0930 11:15:24.317392 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-7ks6c"
Sep 30 11:15:24 crc kubenswrapper[4730]: I0930 11:15:24.451850 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-7ks6c"
Sep 30 11:15:24 crc kubenswrapper[4730]: I0930 11:15:24.581375 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-7ks6c"]
Sep 30 11:15:25 crc kubenswrapper[4730]: I0930 11:15:25.900287 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-7ks6c" podUID="2a822e96-1c06-4afa-9d9d-85a1bf11ac8b" containerName="registry-server" containerID="cri-o://f2807abcbfb9671ebd7b7e31fc84aed5ffa656fa53c10fcc7ff30fc65d2e400a" gracePeriod=2
Sep 30 11:15:26 crc kubenswrapper[4730]: I0930 11:15:26.441593 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-7ks6c"
Sep 30 11:15:26 crc kubenswrapper[4730]: I0930 11:15:26.545073 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2a822e96-1c06-4afa-9d9d-85a1bf11ac8b-catalog-content\") pod \"2a822e96-1c06-4afa-9d9d-85a1bf11ac8b\" (UID: \"2a822e96-1c06-4afa-9d9d-85a1bf11ac8b\") "
Sep 30 11:15:26 crc kubenswrapper[4730]: I0930 11:15:26.545251 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gtcjd\" (UniqueName: \"kubernetes.io/projected/2a822e96-1c06-4afa-9d9d-85a1bf11ac8b-kube-api-access-gtcjd\") pod \"2a822e96-1c06-4afa-9d9d-85a1bf11ac8b\" (UID: \"2a822e96-1c06-4afa-9d9d-85a1bf11ac8b\") "
Sep 30 11:15:26 crc kubenswrapper[4730]: I0930 11:15:26.545329 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2a822e96-1c06-4afa-9d9d-85a1bf11ac8b-utilities\") pod \"2a822e96-1c06-4afa-9d9d-85a1bf11ac8b\" (UID: \"2a822e96-1c06-4afa-9d9d-85a1bf11ac8b\") "
Sep 30 11:15:26 crc kubenswrapper[4730]: I0930 11:15:26.546421 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2a822e96-1c06-4afa-9d9d-85a1bf11ac8b-utilities" (OuterVolumeSpecName: "utilities") pod "2a822e96-1c06-4afa-9d9d-85a1bf11ac8b" (UID: "2a822e96-1c06-4afa-9d9d-85a1bf11ac8b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 11:15:26 crc kubenswrapper[4730]: I0930 11:15:26.546681 4730 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2a822e96-1c06-4afa-9d9d-85a1bf11ac8b-utilities\") on node \"crc\" DevicePath \"\""
Sep 30 11:15:26 crc kubenswrapper[4730]: I0930 11:15:26.552409 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2a822e96-1c06-4afa-9d9d-85a1bf11ac8b-kube-api-access-gtcjd" (OuterVolumeSpecName: "kube-api-access-gtcjd") pod "2a822e96-1c06-4afa-9d9d-85a1bf11ac8b" (UID: "2a822e96-1c06-4afa-9d9d-85a1bf11ac8b"). InnerVolumeSpecName "kube-api-access-gtcjd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 11:15:26 crc kubenswrapper[4730]: I0930 11:15:26.628157 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2a822e96-1c06-4afa-9d9d-85a1bf11ac8b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2a822e96-1c06-4afa-9d9d-85a1bf11ac8b" (UID: "2a822e96-1c06-4afa-9d9d-85a1bf11ac8b"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 11:15:26 crc kubenswrapper[4730]: I0930 11:15:26.648816 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gtcjd\" (UniqueName: \"kubernetes.io/projected/2a822e96-1c06-4afa-9d9d-85a1bf11ac8b-kube-api-access-gtcjd\") on node \"crc\" DevicePath \"\""
Sep 30 11:15:26 crc kubenswrapper[4730]: I0930 11:15:26.648870 4730 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2a822e96-1c06-4afa-9d9d-85a1bf11ac8b-catalog-content\") on node \"crc\" DevicePath \"\""
Sep 30 11:15:26 crc kubenswrapper[4730]: I0930 11:15:26.913779 4730 generic.go:334] "Generic (PLEG): container finished" podID="2a822e96-1c06-4afa-9d9d-85a1bf11ac8b" containerID="f2807abcbfb9671ebd7b7e31fc84aed5ffa656fa53c10fcc7ff30fc65d2e400a" exitCode=0
Sep 30 11:15:26 crc kubenswrapper[4730]: I0930 11:15:26.913837 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7ks6c" event={"ID":"2a822e96-1c06-4afa-9d9d-85a1bf11ac8b","Type":"ContainerDied","Data":"f2807abcbfb9671ebd7b7e31fc84aed5ffa656fa53c10fcc7ff30fc65d2e400a"}
Sep 30 11:15:26 crc kubenswrapper[4730]: I0930 11:15:26.913849 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-7ks6c"
Sep 30 11:15:26 crc kubenswrapper[4730]: I0930 11:15:26.913870 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7ks6c" event={"ID":"2a822e96-1c06-4afa-9d9d-85a1bf11ac8b","Type":"ContainerDied","Data":"1fc88efc2dbfca35fe41cd44ab619a5830fc15f903be73c7a676d12ac1df1b88"}
Sep 30 11:15:26 crc kubenswrapper[4730]: I0930 11:15:26.913892 4730 scope.go:117] "RemoveContainer" containerID="f2807abcbfb9671ebd7b7e31fc84aed5ffa656fa53c10fcc7ff30fc65d2e400a"
Sep 30 11:15:26 crc kubenswrapper[4730]: I0930 11:15:26.938311 4730 scope.go:117] "RemoveContainer" containerID="1bb3e6ace4041138f67325d837f4b95d6610ed25de1de85b0313b964af01101f"
Sep 30 11:15:26 crc kubenswrapper[4730]: I0930 11:15:26.957245 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-7ks6c"]
Sep 30 11:15:26 crc kubenswrapper[4730]: I0930 11:15:26.968955 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-7ks6c"]
Sep 30 11:15:26 crc kubenswrapper[4730]: I0930 11:15:26.988335 4730 scope.go:117] "RemoveContainer" containerID="74badf240aa335da75eca14449aaa0ca88004a80c9f830acbaaaa5b7b4a0b9a4"
Sep 30 11:15:27 crc kubenswrapper[4730]: I0930 11:15:27.016527 4730 scope.go:117] "RemoveContainer" containerID="f2807abcbfb9671ebd7b7e31fc84aed5ffa656fa53c10fcc7ff30fc65d2e400a"
Sep 30 11:15:27 crc kubenswrapper[4730]: E0930 11:15:27.017741 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f2807abcbfb9671ebd7b7e31fc84aed5ffa656fa53c10fcc7ff30fc65d2e400a\": container with ID starting with f2807abcbfb9671ebd7b7e31fc84aed5ffa656fa53c10fcc7ff30fc65d2e400a not found: ID does not exist" containerID="f2807abcbfb9671ebd7b7e31fc84aed5ffa656fa53c10fcc7ff30fc65d2e400a"
Sep 30 11:15:27 crc kubenswrapper[4730]: I0930 11:15:27.017773 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f2807abcbfb9671ebd7b7e31fc84aed5ffa656fa53c10fcc7ff30fc65d2e400a"} err="failed to get container status \"f2807abcbfb9671ebd7b7e31fc84aed5ffa656fa53c10fcc7ff30fc65d2e400a\": rpc error: code = NotFound desc = could not find container \"f2807abcbfb9671ebd7b7e31fc84aed5ffa656fa53c10fcc7ff30fc65d2e400a\": container with ID starting with f2807abcbfb9671ebd7b7e31fc84aed5ffa656fa53c10fcc7ff30fc65d2e400a not found: ID does not exist"
Sep 30 11:15:27 crc kubenswrapper[4730]: I0930 11:15:27.017794 4730 scope.go:117] "RemoveContainer" containerID="1bb3e6ace4041138f67325d837f4b95d6610ed25de1de85b0313b964af01101f"
Sep 30 11:15:27 crc kubenswrapper[4730]: E0930 11:15:27.018015 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1bb3e6ace4041138f67325d837f4b95d6610ed25de1de85b0313b964af01101f\": container with ID starting with 1bb3e6ace4041138f67325d837f4b95d6610ed25de1de85b0313b964af01101f not found: ID does not exist" containerID="1bb3e6ace4041138f67325d837f4b95d6610ed25de1de85b0313b964af01101f"
Sep 30 11:15:27 crc kubenswrapper[4730]: I0930 11:15:27.018033 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1bb3e6ace4041138f67325d837f4b95d6610ed25de1de85b0313b964af01101f"} err="failed to get container status \"1bb3e6ace4041138f67325d837f4b95d6610ed25de1de85b0313b964af01101f\": rpc error: code = NotFound desc = could not find container \"1bb3e6ace4041138f67325d837f4b95d6610ed25de1de85b0313b964af01101f\": container with ID starting with 1bb3e6ace4041138f67325d837f4b95d6610ed25de1de85b0313b964af01101f not found: ID does not exist"
Sep 30 11:15:27 crc kubenswrapper[4730]: I0930 11:15:27.018044 4730 scope.go:117] "RemoveContainer" containerID="74badf240aa335da75eca14449aaa0ca88004a80c9f830acbaaaa5b7b4a0b9a4"
Sep 30 11:15:27 crc kubenswrapper[4730]: E0930 11:15:27.018264 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"74badf240aa335da75eca14449aaa0ca88004a80c9f830acbaaaa5b7b4a0b9a4\": container with ID starting with 74badf240aa335da75eca14449aaa0ca88004a80c9f830acbaaaa5b7b4a0b9a4 not found: ID does not exist" containerID="74badf240aa335da75eca14449aaa0ca88004a80c9f830acbaaaa5b7b4a0b9a4"
Sep 30 11:15:27 crc kubenswrapper[4730]: I0930 11:15:27.018290 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"74badf240aa335da75eca14449aaa0ca88004a80c9f830acbaaaa5b7b4a0b9a4"} err="failed to get container status \"74badf240aa335da75eca14449aaa0ca88004a80c9f830acbaaaa5b7b4a0b9a4\": rpc error: code = NotFound desc = could not find container \"74badf240aa335da75eca14449aaa0ca88004a80c9f830acbaaaa5b7b4a0b9a4\": container with ID starting with 74badf240aa335da75eca14449aaa0ca88004a80c9f830acbaaaa5b7b4a0b9a4 not found: ID does not exist"
Sep 30 11:15:28 crc kubenswrapper[4730]: I0930 11:15:28.399699 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2a822e96-1c06-4afa-9d9d-85a1bf11ac8b" path="/var/lib/kubelet/pods/2a822e96-1c06-4afa-9d9d-85a1bf11ac8b/volumes"
Sep 30 11:15:30 crc kubenswrapper[4730]: I0930 11:15:30.381411 4730 scope.go:117] "RemoveContainer" containerID="92c7793e7eedc6dd3bf5ea2862a618ee0a43e85f4db43416aa6b6a8fcb84a542"
Sep 30 11:15:30 crc kubenswrapper[4730]: E0930 11:15:30.382150 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327"
Sep 30 11:15:32 crc kubenswrapper[4730]: I0930 11:15:32.475541 4730 scope.go:117] "RemoveContainer" containerID="fa43702e2616338e29537c06c8d80c2120be826d7694836cfe65017edb9c8735"
Sep 30 11:15:43 crc kubenswrapper[4730]: I0930 11:15:43.381384 4730 scope.go:117] "RemoveContainer" containerID="92c7793e7eedc6dd3bf5ea2862a618ee0a43e85f4db43416aa6b6a8fcb84a542"
Sep 30 11:15:43 crc kubenswrapper[4730]: E0930 11:15:43.382077 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327"
Sep 30 11:15:56 crc kubenswrapper[4730]: I0930 11:15:56.388486 4730 scope.go:117] "RemoveContainer" containerID="92c7793e7eedc6dd3bf5ea2862a618ee0a43e85f4db43416aa6b6a8fcb84a542"
Sep 30 11:15:56 crc kubenswrapper[4730]: E0930 11:15:56.389758 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327"
Sep 30 11:16:07 crc kubenswrapper[4730]: I0930 11:16:07.392257 4730 scope.go:117] "RemoveContainer" containerID="92c7793e7eedc6dd3bf5ea2862a618ee0a43e85f4db43416aa6b6a8fcb84a542"
Sep 30 11:16:07 crc kubenswrapper[4730]: E0930 11:16:07.395232 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327"
Sep 30 11:16:21 crc kubenswrapper[4730]: I0930 11:16:21.381718 4730 scope.go:117] "RemoveContainer" containerID="92c7793e7eedc6dd3bf5ea2862a618ee0a43e85f4db43416aa6b6a8fcb84a542"
Sep 30 11:16:21 crc kubenswrapper[4730]: E0930 11:16:21.382363 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327"
Sep 30 11:16:35 crc kubenswrapper[4730]: I0930 11:16:35.381321 4730 scope.go:117] "RemoveContainer" containerID="92c7793e7eedc6dd3bf5ea2862a618ee0a43e85f4db43416aa6b6a8fcb84a542"
Sep 30 11:16:36 crc kubenswrapper[4730]: I0930 11:16:36.640869 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" event={"ID":"95bd4436-8399-478d-9552-c9ba5ae8f327","Type":"ContainerStarted","Data":"971ed7b5b2b9c54a39bf2857f1ba0c9a9fbe91b959a068fc5eea3cfdab95f171"}
Sep 30 11:17:45 crc kubenswrapper[4730]: I0930 11:17:45.934846 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-pvdnc"]
Sep 30 11:17:45 crc kubenswrapper[4730]: E0930 11:17:45.936072 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2a822e96-1c06-4afa-9d9d-85a1bf11ac8b" containerName="registry-server"
Sep 30 11:17:45 crc kubenswrapper[4730]: I0930 11:17:45.936092 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="2a822e96-1c06-4afa-9d9d-85a1bf11ac8b" containerName="registry-server"
Sep 30 11:17:45 crc kubenswrapper[4730]: E0930 11:17:45.936133 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2a822e96-1c06-4afa-9d9d-85a1bf11ac8b" containerName="extract-content"
Sep 30 11:17:45 crc kubenswrapper[4730]: I0930 11:17:45.936144 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="2a822e96-1c06-4afa-9d9d-85a1bf11ac8b" containerName="extract-content"
Sep 30 11:17:45 crc kubenswrapper[4730]: E0930 11:17:45.936165 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2a822e96-1c06-4afa-9d9d-85a1bf11ac8b" containerName="extract-utilities"
Sep 30 11:17:45 crc kubenswrapper[4730]: I0930 11:17:45.936174 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="2a822e96-1c06-4afa-9d9d-85a1bf11ac8b" containerName="extract-utilities"
Sep 30 11:17:45 crc kubenswrapper[4730]: I0930 11:17:45.936426 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="2a822e96-1c06-4afa-9d9d-85a1bf11ac8b" containerName="registry-server"
Sep 30 11:17:45 crc kubenswrapper[4730]: I0930 11:17:45.938538 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-pvdnc"
Sep 30 11:17:45 crc kubenswrapper[4730]: I0930 11:17:45.945302 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-pvdnc"]
Sep 30 11:17:46 crc kubenswrapper[4730]: I0930 11:17:46.062504 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r5fvg\" (UniqueName: \"kubernetes.io/projected/c4c56beb-c750-4359-acfa-f2b056e8c8ad-kube-api-access-r5fvg\") pod \"community-operators-pvdnc\" (UID: \"c4c56beb-c750-4359-acfa-f2b056e8c8ad\") " pod="openshift-marketplace/community-operators-pvdnc"
Sep 30 11:17:46 crc kubenswrapper[4730]: I0930 11:17:46.062918 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c4c56beb-c750-4359-acfa-f2b056e8c8ad-catalog-content\") pod \"community-operators-pvdnc\" (UID: \"c4c56beb-c750-4359-acfa-f2b056e8c8ad\") " pod="openshift-marketplace/community-operators-pvdnc"
Sep 30 11:17:46 crc kubenswrapper[4730]: I0930 11:17:46.063108 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c4c56beb-c750-4359-acfa-f2b056e8c8ad-utilities\") pod \"community-operators-pvdnc\" (UID: \"c4c56beb-c750-4359-acfa-f2b056e8c8ad\") " pod="openshift-marketplace/community-operators-pvdnc"
Sep 30 11:17:46 crc kubenswrapper[4730]: I0930 11:17:46.165244 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c4c56beb-c750-4359-acfa-f2b056e8c8ad-utilities\") pod \"community-operators-pvdnc\" (UID: \"c4c56beb-c750-4359-acfa-f2b056e8c8ad\") " pod="openshift-marketplace/community-operators-pvdnc"
Sep 30 11:17:46 crc kubenswrapper[4730]: I0930 11:17:46.165388 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r5fvg\" (UniqueName: \"kubernetes.io/projected/c4c56beb-c750-4359-acfa-f2b056e8c8ad-kube-api-access-r5fvg\") pod \"community-operators-pvdnc\" (UID: \"c4c56beb-c750-4359-acfa-f2b056e8c8ad\") " pod="openshift-marketplace/community-operators-pvdnc"
Sep 30 11:17:46 crc kubenswrapper[4730]: I0930 11:17:46.165475 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c4c56beb-c750-4359-acfa-f2b056e8c8ad-catalog-content\") pod \"community-operators-pvdnc\" (UID: \"c4c56beb-c750-4359-acfa-f2b056e8c8ad\") " pod="openshift-marketplace/community-operators-pvdnc"
Sep 30 11:17:46 crc kubenswrapper[4730]: I0930 11:17:46.165822 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c4c56beb-c750-4359-acfa-f2b056e8c8ad-catalog-content\") pod \"community-operators-pvdnc\" (UID: \"c4c56beb-c750-4359-acfa-f2b056e8c8ad\") " pod="openshift-marketplace/community-operators-pvdnc"
Sep 30 11:17:46 crc kubenswrapper[4730]: I0930 11:17:46.165825 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c4c56beb-c750-4359-acfa-f2b056e8c8ad-utilities\") pod \"community-operators-pvdnc\" (UID: \"c4c56beb-c750-4359-acfa-f2b056e8c8ad\") " pod="openshift-marketplace/community-operators-pvdnc"
Sep 30 11:17:46 crc kubenswrapper[4730]: I0930 11:17:46.185644 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r5fvg\" (UniqueName: \"kubernetes.io/projected/c4c56beb-c750-4359-acfa-f2b056e8c8ad-kube-api-access-r5fvg\") pod \"community-operators-pvdnc\" (UID: \"c4c56beb-c750-4359-acfa-f2b056e8c8ad\") " pod="openshift-marketplace/community-operators-pvdnc"
Sep 30 11:17:46 crc kubenswrapper[4730]: I0930 11:17:46.264201 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-pvdnc"
Sep 30 11:17:46 crc kubenswrapper[4730]: I0930 11:17:46.833905 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-pvdnc"]
Sep 30 11:17:46 crc kubenswrapper[4730]: W0930 11:17:46.840254 4730 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc4c56beb_c750_4359_acfa_f2b056e8c8ad.slice/crio-69ccdf70755d7d7b912bfcbbab63531134282e4e05e188ef382ce2236702d2cd WatchSource:0}: Error finding container 69ccdf70755d7d7b912bfcbbab63531134282e4e05e188ef382ce2236702d2cd: Status 404 returned error can't find the container with id 69ccdf70755d7d7b912bfcbbab63531134282e4e05e188ef382ce2236702d2cd
Sep 30 11:17:47 crc kubenswrapper[4730]: I0930 11:17:47.418735 4730 generic.go:334] "Generic (PLEG): container finished" podID="c4c56beb-c750-4359-acfa-f2b056e8c8ad" containerID="e3f52d17f3eadfda0525ceb797377693f17ee2172d20682d72220f00b3c2759c" exitCode=0
Sep 30 11:17:47 crc kubenswrapper[4730]: I0930 11:17:47.418786 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pvdnc" event={"ID":"c4c56beb-c750-4359-acfa-f2b056e8c8ad","Type":"ContainerDied","Data":"e3f52d17f3eadfda0525ceb797377693f17ee2172d20682d72220f00b3c2759c"}
Sep 30 11:17:47 crc kubenswrapper[4730]: I0930 11:17:47.418831 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pvdnc" event={"ID":"c4c56beb-c750-4359-acfa-f2b056e8c8ad","Type":"ContainerStarted","Data":"69ccdf70755d7d7b912bfcbbab63531134282e4e05e188ef382ce2236702d2cd"}
Sep 30 11:17:48 crc kubenswrapper[4730]: I0930 11:17:48.435918 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pvdnc" event={"ID":"c4c56beb-c750-4359-acfa-f2b056e8c8ad","Type":"ContainerStarted","Data":"ac86dd5d4569c163a43ae96800556b27566bb068a9db724d6c483053cf2b565d"}
Sep 30 11:17:49 crc kubenswrapper[4730]: I0930 11:17:49.448563 4730 generic.go:334] "Generic (PLEG): container finished" podID="c4c56beb-c750-4359-acfa-f2b056e8c8ad" containerID="ac86dd5d4569c163a43ae96800556b27566bb068a9db724d6c483053cf2b565d" exitCode=0
Sep 30 11:17:49 crc kubenswrapper[4730]: I0930 11:17:49.448676 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pvdnc" event={"ID":"c4c56beb-c750-4359-acfa-f2b056e8c8ad","Type":"ContainerDied","Data":"ac86dd5d4569c163a43ae96800556b27566bb068a9db724d6c483053cf2b565d"}
Sep 30 11:17:50 crc kubenswrapper[4730]: I0930 11:17:50.462299 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pvdnc" event={"ID":"c4c56beb-c750-4359-acfa-f2b056e8c8ad","Type":"ContainerStarted","Data":"3d293bbe0b45873ed5502f47ee8e0c3c925e3a3d5d725725ff3a503260ea0ef5"}
Sep 30 11:17:50 crc kubenswrapper[4730]: I0930 11:17:50.494209 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-pvdnc" podStartSLOduration=2.9080982840000003 podStartE2EDuration="5.494186734s" podCreationTimestamp="2025-09-30 11:17:45 +0000 UTC" firstStartedPulling="2025-09-30 11:17:47.42087873 +0000 UTC m=+5311.754138733" lastFinishedPulling="2025-09-30 11:17:50.00696719 +0000 UTC m=+5314.340227183" observedRunningTime="2025-09-30 11:17:50.484679834 +0000 UTC m=+5314.817939827" watchObservedRunningTime="2025-09-30 11:17:50.494186734 +0000 UTC
m=+5314.827446747" Sep 30 11:17:56 crc kubenswrapper[4730]: I0930 11:17:56.264847 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-pvdnc" Sep 30 11:17:56 crc kubenswrapper[4730]: I0930 11:17:56.265554 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-pvdnc" Sep 30 11:17:56 crc kubenswrapper[4730]: I0930 11:17:56.358276 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-pvdnc" Sep 30 11:17:56 crc kubenswrapper[4730]: I0930 11:17:56.580971 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-pvdnc" Sep 30 11:17:56 crc kubenswrapper[4730]: I0930 11:17:56.640563 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-pvdnc"] Sep 30 11:17:58 crc kubenswrapper[4730]: I0930 11:17:58.551315 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-pvdnc" podUID="c4c56beb-c750-4359-acfa-f2b056e8c8ad" containerName="registry-server" containerID="cri-o://3d293bbe0b45873ed5502f47ee8e0c3c925e3a3d5d725725ff3a503260ea0ef5" gracePeriod=2 Sep 30 11:17:59 crc kubenswrapper[4730]: I0930 11:17:59.519564 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-pvdnc" Sep 30 11:17:59 crc kubenswrapper[4730]: I0930 11:17:59.567374 4730 generic.go:334] "Generic (PLEG): container finished" podID="c4c56beb-c750-4359-acfa-f2b056e8c8ad" containerID="3d293bbe0b45873ed5502f47ee8e0c3c925e3a3d5d725725ff3a503260ea0ef5" exitCode=0 Sep 30 11:17:59 crc kubenswrapper[4730]: I0930 11:17:59.567426 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pvdnc" event={"ID":"c4c56beb-c750-4359-acfa-f2b056e8c8ad","Type":"ContainerDied","Data":"3d293bbe0b45873ed5502f47ee8e0c3c925e3a3d5d725725ff3a503260ea0ef5"} Sep 30 11:17:59 crc kubenswrapper[4730]: I0930 11:17:59.567474 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pvdnc" event={"ID":"c4c56beb-c750-4359-acfa-f2b056e8c8ad","Type":"ContainerDied","Data":"69ccdf70755d7d7b912bfcbbab63531134282e4e05e188ef382ce2236702d2cd"} Sep 30 11:17:59 crc kubenswrapper[4730]: I0930 11:17:59.567506 4730 scope.go:117] "RemoveContainer" containerID="3d293bbe0b45873ed5502f47ee8e0c3c925e3a3d5d725725ff3a503260ea0ef5" Sep 30 11:17:59 crc kubenswrapper[4730]: I0930 11:17:59.567803 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-pvdnc" Sep 30 11:17:59 crc kubenswrapper[4730]: I0930 11:17:59.602286 4730 scope.go:117] "RemoveContainer" containerID="ac86dd5d4569c163a43ae96800556b27566bb068a9db724d6c483053cf2b565d" Sep 30 11:17:59 crc kubenswrapper[4730]: I0930 11:17:59.623864 4730 scope.go:117] "RemoveContainer" containerID="e3f52d17f3eadfda0525ceb797377693f17ee2172d20682d72220f00b3c2759c" Sep 30 11:17:59 crc kubenswrapper[4730]: I0930 11:17:59.666336 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r5fvg\" (UniqueName: \"kubernetes.io/projected/c4c56beb-c750-4359-acfa-f2b056e8c8ad-kube-api-access-r5fvg\") pod \"c4c56beb-c750-4359-acfa-f2b056e8c8ad\" (UID: \"c4c56beb-c750-4359-acfa-f2b056e8c8ad\") " Sep 30 11:17:59 crc kubenswrapper[4730]: I0930 11:17:59.666558 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c4c56beb-c750-4359-acfa-f2b056e8c8ad-utilities\") pod \"c4c56beb-c750-4359-acfa-f2b056e8c8ad\" (UID: \"c4c56beb-c750-4359-acfa-f2b056e8c8ad\") " Sep 30 11:17:59 crc kubenswrapper[4730]: I0930 11:17:59.666658 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c4c56beb-c750-4359-acfa-f2b056e8c8ad-catalog-content\") pod \"c4c56beb-c750-4359-acfa-f2b056e8c8ad\" (UID: \"c4c56beb-c750-4359-acfa-f2b056e8c8ad\") " Sep 30 11:17:59 crc kubenswrapper[4730]: I0930 11:17:59.667666 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c4c56beb-c750-4359-acfa-f2b056e8c8ad-utilities" (OuterVolumeSpecName: "utilities") pod "c4c56beb-c750-4359-acfa-f2b056e8c8ad" (UID: "c4c56beb-c750-4359-acfa-f2b056e8c8ad"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 11:17:59 crc kubenswrapper[4730]: I0930 11:17:59.674153 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c4c56beb-c750-4359-acfa-f2b056e8c8ad-kube-api-access-r5fvg" (OuterVolumeSpecName: "kube-api-access-r5fvg") pod "c4c56beb-c750-4359-acfa-f2b056e8c8ad" (UID: "c4c56beb-c750-4359-acfa-f2b056e8c8ad"). InnerVolumeSpecName "kube-api-access-r5fvg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 11:17:59 crc kubenswrapper[4730]: I0930 11:17:59.675720 4730 scope.go:117] "RemoveContainer" containerID="3d293bbe0b45873ed5502f47ee8e0c3c925e3a3d5d725725ff3a503260ea0ef5" Sep 30 11:17:59 crc kubenswrapper[4730]: E0930 11:17:59.676297 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3d293bbe0b45873ed5502f47ee8e0c3c925e3a3d5d725725ff3a503260ea0ef5\": container with ID starting with 3d293bbe0b45873ed5502f47ee8e0c3c925e3a3d5d725725ff3a503260ea0ef5 not found: ID does not exist" containerID="3d293bbe0b45873ed5502f47ee8e0c3c925e3a3d5d725725ff3a503260ea0ef5" Sep 30 11:17:59 crc kubenswrapper[4730]: I0930 11:17:59.676343 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3d293bbe0b45873ed5502f47ee8e0c3c925e3a3d5d725725ff3a503260ea0ef5"} err="failed to get container status \"3d293bbe0b45873ed5502f47ee8e0c3c925e3a3d5d725725ff3a503260ea0ef5\": rpc error: code = NotFound desc = could not find container \"3d293bbe0b45873ed5502f47ee8e0c3c925e3a3d5d725725ff3a503260ea0ef5\": container with ID starting with 3d293bbe0b45873ed5502f47ee8e0c3c925e3a3d5d725725ff3a503260ea0ef5 not found: ID does not exist" Sep 30 11:17:59 crc kubenswrapper[4730]: I0930 11:17:59.676371 4730 scope.go:117] "RemoveContainer" containerID="ac86dd5d4569c163a43ae96800556b27566bb068a9db724d6c483053cf2b565d" Sep 30 11:17:59 crc kubenswrapper[4730]: E0930 11:17:59.676722 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ac86dd5d4569c163a43ae96800556b27566bb068a9db724d6c483053cf2b565d\": container with ID starting with ac86dd5d4569c163a43ae96800556b27566bb068a9db724d6c483053cf2b565d not found: ID does not exist" containerID="ac86dd5d4569c163a43ae96800556b27566bb068a9db724d6c483053cf2b565d" Sep 30 11:17:59 crc kubenswrapper[4730]: I0930 11:17:59.676755 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ac86dd5d4569c163a43ae96800556b27566bb068a9db724d6c483053cf2b565d"} err="failed to get container status \"ac86dd5d4569c163a43ae96800556b27566bb068a9db724d6c483053cf2b565d\": rpc error: code = NotFound desc = could not find container \"ac86dd5d4569c163a43ae96800556b27566bb068a9db724d6c483053cf2b565d\": container with ID starting with ac86dd5d4569c163a43ae96800556b27566bb068a9db724d6c483053cf2b565d not found: ID does not exist" Sep 30 11:17:59 crc kubenswrapper[4730]: I0930 11:17:59.676776 4730 scope.go:117] "RemoveContainer" containerID="e3f52d17f3eadfda0525ceb797377693f17ee2172d20682d72220f00b3c2759c" Sep 30 11:17:59 crc kubenswrapper[4730]: E0930 11:17:59.677263 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e3f52d17f3eadfda0525ceb797377693f17ee2172d20682d72220f00b3c2759c\": container with ID starting with e3f52d17f3eadfda0525ceb797377693f17ee2172d20682d72220f00b3c2759c not found: ID does not exist" containerID="e3f52d17f3eadfda0525ceb797377693f17ee2172d20682d72220f00b3c2759c" Sep 30 11:17:59 crc kubenswrapper[4730]: I0930 11:17:59.677296 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e3f52d17f3eadfda0525ceb797377693f17ee2172d20682d72220f00b3c2759c"} err="failed to get container status \"e3f52d17f3eadfda0525ceb797377693f17ee2172d20682d72220f00b3c2759c\": rpc error: code = NotFound desc = could not 
find container \"e3f52d17f3eadfda0525ceb797377693f17ee2172d20682d72220f00b3c2759c\": container with ID starting with e3f52d17f3eadfda0525ceb797377693f17ee2172d20682d72220f00b3c2759c not found: ID does not exist" Sep 30 11:17:59 crc kubenswrapper[4730]: I0930 11:17:59.711254 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c4c56beb-c750-4359-acfa-f2b056e8c8ad-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c4c56beb-c750-4359-acfa-f2b056e8c8ad" (UID: "c4c56beb-c750-4359-acfa-f2b056e8c8ad"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 11:17:59 crc kubenswrapper[4730]: I0930 11:17:59.768884 4730 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c4c56beb-c750-4359-acfa-f2b056e8c8ad-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 11:17:59 crc kubenswrapper[4730]: I0930 11:17:59.768917 4730 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c4c56beb-c750-4359-acfa-f2b056e8c8ad-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 11:17:59 crc kubenswrapper[4730]: I0930 11:17:59.768929 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r5fvg\" (UniqueName: \"kubernetes.io/projected/c4c56beb-c750-4359-acfa-f2b056e8c8ad-kube-api-access-r5fvg\") on node \"crc\" DevicePath \"\"" Sep 30 11:17:59 crc kubenswrapper[4730]: I0930 11:17:59.907982 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-pvdnc"] Sep 30 11:17:59 crc kubenswrapper[4730]: I0930 11:17:59.915603 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-pvdnc"] Sep 30 11:18:00 crc kubenswrapper[4730]: I0930 11:18:00.402132 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c4c56beb-c750-4359-acfa-f2b056e8c8ad" path="/var/lib/kubelet/pods/c4c56beb-c750-4359-acfa-f2b056e8c8ad/volumes" Sep 30 11:19:02 crc kubenswrapper[4730]: I0930 11:19:02.337135 4730 patch_prober.go:28] interesting pod/machine-config-daemon-d4zf9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 11:19:02 crc kubenswrapper[4730]: I0930 11:19:02.337710 4730 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 11:19:32 crc kubenswrapper[4730]: I0930 11:19:32.336437 4730 patch_prober.go:28] interesting pod/machine-config-daemon-d4zf9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 11:19:32 crc kubenswrapper[4730]: I0930 11:19:32.337045 4730 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection 
refused" Sep 30 11:19:59 crc kubenswrapper[4730]: I0930 11:19:59.196927 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-ck8wf"] Sep 30 11:19:59 crc kubenswrapper[4730]: E0930 11:19:59.197812 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c4c56beb-c750-4359-acfa-f2b056e8c8ad" containerName="extract-utilities" Sep 30 11:19:59 crc kubenswrapper[4730]: I0930 11:19:59.197825 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="c4c56beb-c750-4359-acfa-f2b056e8c8ad" containerName="extract-utilities" Sep 30 11:19:59 crc kubenswrapper[4730]: E0930 11:19:59.197847 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c4c56beb-c750-4359-acfa-f2b056e8c8ad" containerName="extract-content" Sep 30 11:19:59 crc kubenswrapper[4730]: I0930 11:19:59.197854 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="c4c56beb-c750-4359-acfa-f2b056e8c8ad" containerName="extract-content" Sep 30 11:19:59 crc kubenswrapper[4730]: E0930 11:19:59.197867 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c4c56beb-c750-4359-acfa-f2b056e8c8ad" containerName="registry-server" Sep 30 11:19:59 crc kubenswrapper[4730]: I0930 11:19:59.197873 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="c4c56beb-c750-4359-acfa-f2b056e8c8ad" containerName="registry-server" Sep 30 11:19:59 crc kubenswrapper[4730]: I0930 11:19:59.198073 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="c4c56beb-c750-4359-acfa-f2b056e8c8ad" containerName="registry-server" Sep 30 11:19:59 crc kubenswrapper[4730]: I0930 11:19:59.199543 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ck8wf" Sep 30 11:19:59 crc kubenswrapper[4730]: I0930 11:19:59.216764 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-ck8wf"] Sep 30 11:19:59 crc kubenswrapper[4730]: I0930 11:19:59.304413 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fht8b\" (UniqueName: \"kubernetes.io/projected/e1815937-e8e1-4895-8f70-a7c52c9e571d-kube-api-access-fht8b\") pod \"redhat-marketplace-ck8wf\" (UID: \"e1815937-e8e1-4895-8f70-a7c52c9e571d\") " pod="openshift-marketplace/redhat-marketplace-ck8wf" Sep 30 11:19:59 crc kubenswrapper[4730]: I0930 11:19:59.304500 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e1815937-e8e1-4895-8f70-a7c52c9e571d-utilities\") pod \"redhat-marketplace-ck8wf\" (UID: \"e1815937-e8e1-4895-8f70-a7c52c9e571d\") " pod="openshift-marketplace/redhat-marketplace-ck8wf" Sep 30 11:19:59 crc kubenswrapper[4730]: I0930 11:19:59.304575 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e1815937-e8e1-4895-8f70-a7c52c9e571d-catalog-content\") pod \"redhat-marketplace-ck8wf\" (UID: \"e1815937-e8e1-4895-8f70-a7c52c9e571d\") " pod="openshift-marketplace/redhat-marketplace-ck8wf" Sep 30 11:19:59 crc kubenswrapper[4730]: I0930 11:19:59.406923 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fht8b\" (UniqueName: \"kubernetes.io/projected/e1815937-e8e1-4895-8f70-a7c52c9e571d-kube-api-access-fht8b\") pod \"redhat-marketplace-ck8wf\" (UID: \"e1815937-e8e1-4895-8f70-a7c52c9e571d\") " 
pod="openshift-marketplace/redhat-marketplace-ck8wf" Sep 30 11:19:59 crc kubenswrapper[4730]: I0930 11:19:59.407011 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e1815937-e8e1-4895-8f70-a7c52c9e571d-utilities\") pod \"redhat-marketplace-ck8wf\" (UID: \"e1815937-e8e1-4895-8f70-a7c52c9e571d\") " pod="openshift-marketplace/redhat-marketplace-ck8wf" Sep 30 11:19:59 crc kubenswrapper[4730]: I0930 11:19:59.407088 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e1815937-e8e1-4895-8f70-a7c52c9e571d-catalog-content\") pod \"redhat-marketplace-ck8wf\" (UID: \"e1815937-e8e1-4895-8f70-a7c52c9e571d\") " pod="openshift-marketplace/redhat-marketplace-ck8wf" Sep 30 11:19:59 crc kubenswrapper[4730]: I0930 11:19:59.407693 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e1815937-e8e1-4895-8f70-a7c52c9e571d-utilities\") pod \"redhat-marketplace-ck8wf\" (UID: \"e1815937-e8e1-4895-8f70-a7c52c9e571d\") " pod="openshift-marketplace/redhat-marketplace-ck8wf" Sep 30 11:19:59 crc kubenswrapper[4730]: I0930 11:19:59.408459 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e1815937-e8e1-4895-8f70-a7c52c9e571d-catalog-content\") pod \"redhat-marketplace-ck8wf\" (UID: \"e1815937-e8e1-4895-8f70-a7c52c9e571d\") " pod="openshift-marketplace/redhat-marketplace-ck8wf" Sep 30 11:19:59 crc kubenswrapper[4730]: I0930 11:19:59.432155 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fht8b\" (UniqueName: \"kubernetes.io/projected/e1815937-e8e1-4895-8f70-a7c52c9e571d-kube-api-access-fht8b\") pod \"redhat-marketplace-ck8wf\" (UID: \"e1815937-e8e1-4895-8f70-a7c52c9e571d\") " pod="openshift-marketplace/redhat-marketplace-ck8wf" Sep 30 11:19:59 crc kubenswrapper[4730]: I0930 11:19:59.541256 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ck8wf" Sep 30 11:20:00 crc kubenswrapper[4730]: I0930 11:20:00.035903 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-ck8wf"] Sep 30 11:20:00 crc kubenswrapper[4730]: I0930 11:20:00.765452 4730 generic.go:334] "Generic (PLEG): container finished" podID="e1815937-e8e1-4895-8f70-a7c52c9e571d" containerID="980a3447bd9b6c0e951b32fc8af871e624bef36986d4ce2ca28b81413f4faebe" exitCode=0 Sep 30 11:20:00 crc kubenswrapper[4730]: I0930 11:20:00.765666 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ck8wf" event={"ID":"e1815937-e8e1-4895-8f70-a7c52c9e571d","Type":"ContainerDied","Data":"980a3447bd9b6c0e951b32fc8af871e624bef36986d4ce2ca28b81413f4faebe"} Sep 30 11:20:00 crc kubenswrapper[4730]: I0930 11:20:00.766102 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ck8wf" event={"ID":"e1815937-e8e1-4895-8f70-a7c52c9e571d","Type":"ContainerStarted","Data":"e7c57b8221b842554075dd20641cff809930f2072245765e2b70331bfedfb6b0"} Sep 30 11:20:02 crc kubenswrapper[4730]: I0930 11:20:02.340067 4730 patch_prober.go:28] interesting pod/machine-config-daemon-d4zf9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 11:20:02 crc kubenswrapper[4730]: I0930 11:20:02.340598 4730 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 11:20:02 crc kubenswrapper[4730]: I0930 11:20:02.340661 4730 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" Sep 30 11:20:02 crc kubenswrapper[4730]: I0930 11:20:02.341429 4730 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"971ed7b5b2b9c54a39bf2857f1ba0c9a9fbe91b959a068fc5eea3cfdab95f171"} pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 11:20:02 crc kubenswrapper[4730]: I0930 11:20:02.341477 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerName="machine-config-daemon" containerID="cri-o://971ed7b5b2b9c54a39bf2857f1ba0c9a9fbe91b959a068fc5eea3cfdab95f171" gracePeriod=600 Sep 30 11:20:02 crc kubenswrapper[4730]: E0930 11:20:02.514802 4730 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod95bd4436_8399_478d_9552_c9ba5ae8f327.slice/crio-971ed7b5b2b9c54a39bf2857f1ba0c9a9fbe91b959a068fc5eea3cfdab95f171.scope\": RecentStats: unable to find data in memory cache]" Sep 30 11:20:02 crc kubenswrapper[4730]: I0930 11:20:02.788146 4730 generic.go:334] "Generic (PLEG): container finished" podID="e1815937-e8e1-4895-8f70-a7c52c9e571d" 
containerID="8266892a64aef7b592992d91d379f73242409013982e1f41c874a2161b485b25" exitCode=0 Sep 30 11:20:02 crc kubenswrapper[4730]: I0930 11:20:02.788235 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ck8wf" event={"ID":"e1815937-e8e1-4895-8f70-a7c52c9e571d","Type":"ContainerDied","Data":"8266892a64aef7b592992d91d379f73242409013982e1f41c874a2161b485b25"} Sep 30 11:20:02 crc kubenswrapper[4730]: I0930 11:20:02.796326 4730 generic.go:334] "Generic (PLEG): container finished" podID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerID="971ed7b5b2b9c54a39bf2857f1ba0c9a9fbe91b959a068fc5eea3cfdab95f171" exitCode=0 Sep 30 11:20:02 crc kubenswrapper[4730]: I0930 11:20:02.796375 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" event={"ID":"95bd4436-8399-478d-9552-c9ba5ae8f327","Type":"ContainerDied","Data":"971ed7b5b2b9c54a39bf2857f1ba0c9a9fbe91b959a068fc5eea3cfdab95f171"} Sep 30 11:20:02 crc kubenswrapper[4730]: I0930 11:20:02.796440 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" event={"ID":"95bd4436-8399-478d-9552-c9ba5ae8f327","Type":"ContainerStarted","Data":"36d8443318fd4677f87c68f1ea9da04f7cf043d37e0f251d71cbee455d31fe35"} Sep 30 11:20:02 crc kubenswrapper[4730]: I0930 11:20:02.796458 4730 scope.go:117] "RemoveContainer" containerID="92c7793e7eedc6dd3bf5ea2862a618ee0a43e85f4db43416aa6b6a8fcb84a542" Sep 30 11:20:03 crc kubenswrapper[4730]: I0930 11:20:03.807846 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ck8wf" event={"ID":"e1815937-e8e1-4895-8f70-a7c52c9e571d","Type":"ContainerStarted","Data":"f62c1559b363761c652853182f8a8cb632a1756cf401940129eb9b7a9b28795e"} Sep 30 11:20:03 crc kubenswrapper[4730]: I0930 11:20:03.833877 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-ck8wf" podStartSLOduration=2.394510092 podStartE2EDuration="4.833859494s" podCreationTimestamp="2025-09-30 11:19:59 +0000 UTC" firstStartedPulling="2025-09-30 11:20:00.768035967 +0000 UTC m=+5445.101295960" lastFinishedPulling="2025-09-30 11:20:03.207385369 +0000 UTC m=+5447.540645362" observedRunningTime="2025-09-30 11:20:03.828424472 +0000 UTC m=+5448.161684465" watchObservedRunningTime="2025-09-30 11:20:03.833859494 +0000 UTC m=+5448.167119487" Sep 30 11:20:09 crc kubenswrapper[4730]: I0930 11:20:09.541496 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-ck8wf" Sep 30 11:20:09 crc kubenswrapper[4730]: I0930 11:20:09.542968 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-ck8wf" Sep 30 11:20:09 crc kubenswrapper[4730]: I0930 11:20:09.589027 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-ck8wf" Sep 30 11:20:09 crc kubenswrapper[4730]: I0930 11:20:09.923959 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-ck8wf" Sep 30 11:20:09 crc kubenswrapper[4730]: I0930 11:20:09.974648 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-ck8wf"] Sep 30 11:20:11 crc kubenswrapper[4730]: I0930 11:20:11.883836 4730 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openshift-marketplace/redhat-marketplace-ck8wf" podUID="e1815937-e8e1-4895-8f70-a7c52c9e571d" containerName="registry-server" containerID="cri-o://f62c1559b363761c652853182f8a8cb632a1756cf401940129eb9b7a9b28795e" gracePeriod=2 Sep 30 11:20:12 crc kubenswrapper[4730]: I0930 11:20:12.629465 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ck8wf" Sep 30 11:20:12 crc kubenswrapper[4730]: I0930 11:20:12.698713 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e1815937-e8e1-4895-8f70-a7c52c9e571d-utilities\") pod \"e1815937-e8e1-4895-8f70-a7c52c9e571d\" (UID: \"e1815937-e8e1-4895-8f70-a7c52c9e571d\") " Sep 30 11:20:12 crc kubenswrapper[4730]: I0930 11:20:12.698970 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fht8b\" (UniqueName: \"kubernetes.io/projected/e1815937-e8e1-4895-8f70-a7c52c9e571d-kube-api-access-fht8b\") pod \"e1815937-e8e1-4895-8f70-a7c52c9e571d\" (UID: \"e1815937-e8e1-4895-8f70-a7c52c9e571d\") " Sep 30 11:20:12 crc kubenswrapper[4730]: I0930 11:20:12.699173 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e1815937-e8e1-4895-8f70-a7c52c9e571d-catalog-content\") pod \"e1815937-e8e1-4895-8f70-a7c52c9e571d\" (UID: \"e1815937-e8e1-4895-8f70-a7c52c9e571d\") " Sep 30 11:20:12 crc kubenswrapper[4730]: I0930 11:20:12.700264 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e1815937-e8e1-4895-8f70-a7c52c9e571d-utilities" (OuterVolumeSpecName: "utilities") pod "e1815937-e8e1-4895-8f70-a7c52c9e571d" (UID: "e1815937-e8e1-4895-8f70-a7c52c9e571d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 11:20:12 crc kubenswrapper[4730]: I0930 11:20:12.713880 4730 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e1815937-e8e1-4895-8f70-a7c52c9e571d-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 11:20:12 crc kubenswrapper[4730]: I0930 11:20:12.717360 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e1815937-e8e1-4895-8f70-a7c52c9e571d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e1815937-e8e1-4895-8f70-a7c52c9e571d" (UID: "e1815937-e8e1-4895-8f70-a7c52c9e571d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 11:20:12 crc kubenswrapper[4730]: I0930 11:20:12.739859 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e1815937-e8e1-4895-8f70-a7c52c9e571d-kube-api-access-fht8b" (OuterVolumeSpecName: "kube-api-access-fht8b") pod "e1815937-e8e1-4895-8f70-a7c52c9e571d" (UID: "e1815937-e8e1-4895-8f70-a7c52c9e571d"). InnerVolumeSpecName "kube-api-access-fht8b". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 11:20:12 crc kubenswrapper[4730]: I0930 11:20:12.816338 4730 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e1815937-e8e1-4895-8f70-a7c52c9e571d-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 11:20:12 crc kubenswrapper[4730]: I0930 11:20:12.816376 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fht8b\" (UniqueName: \"kubernetes.io/projected/e1815937-e8e1-4895-8f70-a7c52c9e571d-kube-api-access-fht8b\") on node \"crc\" DevicePath \"\"" Sep 30 11:20:12 crc kubenswrapper[4730]: I0930 11:20:12.893899 4730 generic.go:334] "Generic (PLEG): container finished" podID="e1815937-e8e1-4895-8f70-a7c52c9e571d" containerID="f62c1559b363761c652853182f8a8cb632a1756cf401940129eb9b7a9b28795e" exitCode=0 Sep 30 11:20:12 crc kubenswrapper[4730]: I0930 11:20:12.893947 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ck8wf" event={"ID":"e1815937-e8e1-4895-8f70-a7c52c9e571d","Type":"ContainerDied","Data":"f62c1559b363761c652853182f8a8cb632a1756cf401940129eb9b7a9b28795e"} Sep 30 11:20:12 crc kubenswrapper[4730]: I0930 11:20:12.893966 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ck8wf" Sep 30 11:20:12 crc kubenswrapper[4730]: I0930 11:20:12.893987 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ck8wf" event={"ID":"e1815937-e8e1-4895-8f70-a7c52c9e571d","Type":"ContainerDied","Data":"e7c57b8221b842554075dd20641cff809930f2072245765e2b70331bfedfb6b0"} Sep 30 11:20:12 crc kubenswrapper[4730]: I0930 11:20:12.894015 4730 scope.go:117] "RemoveContainer" containerID="f62c1559b363761c652853182f8a8cb632a1756cf401940129eb9b7a9b28795e" Sep 30 11:20:12 crc kubenswrapper[4730]: I0930 11:20:12.923173 4730 scope.go:117] "RemoveContainer" containerID="8266892a64aef7b592992d91d379f73242409013982e1f41c874a2161b485b25" Sep 30 11:20:12 crc kubenswrapper[4730]: I0930 11:20:12.931470 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-ck8wf"] Sep 30 11:20:12 crc kubenswrapper[4730]: I0930 11:20:12.939603 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-ck8wf"] Sep 30 11:20:12 crc kubenswrapper[4730]: I0930 11:20:12.947058 4730 scope.go:117] "RemoveContainer" containerID="980a3447bd9b6c0e951b32fc8af871e624bef36986d4ce2ca28b81413f4faebe" Sep 30 11:20:12 crc kubenswrapper[4730]: I0930 11:20:12.998330 4730 scope.go:117] "RemoveContainer" containerID="f62c1559b363761c652853182f8a8cb632a1756cf401940129eb9b7a9b28795e" Sep 30 11:20:12 crc kubenswrapper[4730]: E0930 11:20:12.999042 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f62c1559b363761c652853182f8a8cb632a1756cf401940129eb9b7a9b28795e\": container with ID starting with f62c1559b363761c652853182f8a8cb632a1756cf401940129eb9b7a9b28795e not found: ID does not exist" containerID="f62c1559b363761c652853182f8a8cb632a1756cf401940129eb9b7a9b28795e" Sep 30 11:20:12 crc kubenswrapper[4730]: I0930 11:20:12.999095 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f62c1559b363761c652853182f8a8cb632a1756cf401940129eb9b7a9b28795e"} err="failed to get container status 
\"f62c1559b363761c652853182f8a8cb632a1756cf401940129eb9b7a9b28795e\": rpc error: code = NotFound desc = could not find container \"f62c1559b363761c652853182f8a8cb632a1756cf401940129eb9b7a9b28795e\": container with ID starting with f62c1559b363761c652853182f8a8cb632a1756cf401940129eb9b7a9b28795e not found: ID does not exist" Sep 30 11:20:12 crc kubenswrapper[4730]: I0930 11:20:12.999117 4730 scope.go:117] "RemoveContainer" containerID="8266892a64aef7b592992d91d379f73242409013982e1f41c874a2161b485b25" Sep 30 11:20:12 crc kubenswrapper[4730]: E0930 11:20:12.999552 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8266892a64aef7b592992d91d379f73242409013982e1f41c874a2161b485b25\": container with ID starting with 8266892a64aef7b592992d91d379f73242409013982e1f41c874a2161b485b25 not found: ID does not exist" containerID="8266892a64aef7b592992d91d379f73242409013982e1f41c874a2161b485b25" Sep 30 11:20:12 crc kubenswrapper[4730]: I0930 11:20:12.999596 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8266892a64aef7b592992d91d379f73242409013982e1f41c874a2161b485b25"} err="failed to get container status \"8266892a64aef7b592992d91d379f73242409013982e1f41c874a2161b485b25\": rpc error: code = NotFound desc = could not find container \"8266892a64aef7b592992d91d379f73242409013982e1f41c874a2161b485b25\": container with ID starting with 8266892a64aef7b592992d91d379f73242409013982e1f41c874a2161b485b25 not found: ID does not exist" Sep 30 11:20:12 crc kubenswrapper[4730]: I0930 11:20:12.999645 4730 scope.go:117] "RemoveContainer" containerID="980a3447bd9b6c0e951b32fc8af871e624bef36986d4ce2ca28b81413f4faebe" Sep 30 11:20:13 crc kubenswrapper[4730]: E0930 11:20:13.000015 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"980a3447bd9b6c0e951b32fc8af871e624bef36986d4ce2ca28b81413f4faebe\": container with ID starting with 980a3447bd9b6c0e951b32fc8af871e624bef36986d4ce2ca28b81413f4faebe not found: ID does not exist" containerID="980a3447bd9b6c0e951b32fc8af871e624bef36986d4ce2ca28b81413f4faebe" Sep 30 11:20:13 crc kubenswrapper[4730]: I0930 11:20:13.000057 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"980a3447bd9b6c0e951b32fc8af871e624bef36986d4ce2ca28b81413f4faebe"} err="failed to get container status \"980a3447bd9b6c0e951b32fc8af871e624bef36986d4ce2ca28b81413f4faebe\": rpc error: code = NotFound desc = could not find container \"980a3447bd9b6c0e951b32fc8af871e624bef36986d4ce2ca28b81413f4faebe\": container with ID starting with 980a3447bd9b6c0e951b32fc8af871e624bef36986d4ce2ca28b81413f4faebe not found: ID does not exist" Sep 30 11:20:14 crc kubenswrapper[4730]: I0930 11:20:14.394447 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e1815937-e8e1-4895-8f70-a7c52c9e571d" path="/var/lib/kubelet/pods/e1815937-e8e1-4895-8f70-a7c52c9e571d/volumes" Sep 30 11:21:36 crc kubenswrapper[4730]: I0930 11:21:36.600011 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-zdlqc"] Sep 30 11:21:36 crc kubenswrapper[4730]: E0930 11:21:36.601816 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e1815937-e8e1-4895-8f70-a7c52c9e571d" containerName="registry-server" Sep 30 11:21:36 crc kubenswrapper[4730]: I0930 11:21:36.601878 4730 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="e1815937-e8e1-4895-8f70-a7c52c9e571d" containerName="registry-server" Sep 30 11:21:36 crc kubenswrapper[4730]: E0930 11:21:36.601955 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e1815937-e8e1-4895-8f70-a7c52c9e571d" containerName="extract-utilities" Sep 30 11:21:36 crc kubenswrapper[4730]: I0930 11:21:36.601975 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="e1815937-e8e1-4895-8f70-a7c52c9e571d" containerName="extract-utilities" Sep 30 11:21:36 crc kubenswrapper[4730]: E0930 11:21:36.602031 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e1815937-e8e1-4895-8f70-a7c52c9e571d" containerName="extract-content" Sep 30 11:21:36 crc kubenswrapper[4730]: I0930 11:21:36.602050 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="e1815937-e8e1-4895-8f70-a7c52c9e571d" containerName="extract-content" Sep 30 11:21:36 crc kubenswrapper[4730]: I0930 11:21:36.602479 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="e1815937-e8e1-4895-8f70-a7c52c9e571d" containerName="registry-server" Sep 30 11:21:36 crc kubenswrapper[4730]: I0930 11:21:36.606167 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-zdlqc" Sep 30 11:21:36 crc kubenswrapper[4730]: I0930 11:21:36.614531 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-zdlqc"] Sep 30 11:21:36 crc kubenswrapper[4730]: I0930 11:21:36.647039 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xp9sr\" (UniqueName: \"kubernetes.io/projected/a621e0c4-5687-4e20-9c31-11b5ee23f644-kube-api-access-xp9sr\") pod \"certified-operators-zdlqc\" (UID: \"a621e0c4-5687-4e20-9c31-11b5ee23f644\") " pod="openshift-marketplace/certified-operators-zdlqc" Sep 30 11:21:36 crc kubenswrapper[4730]: I0930 11:21:36.647245 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a621e0c4-5687-4e20-9c31-11b5ee23f644-catalog-content\") pod \"certified-operators-zdlqc\" (UID: \"a621e0c4-5687-4e20-9c31-11b5ee23f644\") " pod="openshift-marketplace/certified-operators-zdlqc" Sep 30 11:21:36 crc kubenswrapper[4730]: I0930 11:21:36.647300 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a621e0c4-5687-4e20-9c31-11b5ee23f644-utilities\") pod \"certified-operators-zdlqc\" (UID: \"a621e0c4-5687-4e20-9c31-11b5ee23f644\") " pod="openshift-marketplace/certified-operators-zdlqc" Sep 30 11:21:36 crc kubenswrapper[4730]: I0930 11:21:36.749868 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a621e0c4-5687-4e20-9c31-11b5ee23f644-catalog-content\") pod \"certified-operators-zdlqc\" (UID: \"a621e0c4-5687-4e20-9c31-11b5ee23f644\") " pod="openshift-marketplace/certified-operators-zdlqc" Sep 30 11:21:36 crc kubenswrapper[4730]: I0930 11:21:36.749943 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a621e0c4-5687-4e20-9c31-11b5ee23f644-utilities\") pod \"certified-operators-zdlqc\" (UID: \"a621e0c4-5687-4e20-9c31-11b5ee23f644\") " pod="openshift-marketplace/certified-operators-zdlqc" Sep 30 11:21:36 crc kubenswrapper[4730]: I0930 11:21:36.750436 4730 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a621e0c4-5687-4e20-9c31-11b5ee23f644-catalog-content\") pod \"certified-operators-zdlqc\" (UID: \"a621e0c4-5687-4e20-9c31-11b5ee23f644\") " pod="openshift-marketplace/certified-operators-zdlqc" Sep 30 11:21:36 crc kubenswrapper[4730]: I0930 11:21:36.750483 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a621e0c4-5687-4e20-9c31-11b5ee23f644-utilities\") pod \"certified-operators-zdlqc\" (UID: \"a621e0c4-5687-4e20-9c31-11b5ee23f644\") " pod="openshift-marketplace/certified-operators-zdlqc" Sep 30 11:21:36 crc kubenswrapper[4730]: I0930 11:21:36.750665 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xp9sr\" (UniqueName: \"kubernetes.io/projected/a621e0c4-5687-4e20-9c31-11b5ee23f644-kube-api-access-xp9sr\") pod \"certified-operators-zdlqc\" (UID: \"a621e0c4-5687-4e20-9c31-11b5ee23f644\") " pod="openshift-marketplace/certified-operators-zdlqc" Sep 30 11:21:36 crc kubenswrapper[4730]: I0930 11:21:36.769572 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xp9sr\" (UniqueName: \"kubernetes.io/projected/a621e0c4-5687-4e20-9c31-11b5ee23f644-kube-api-access-xp9sr\") pod \"certified-operators-zdlqc\" (UID: \"a621e0c4-5687-4e20-9c31-11b5ee23f644\") " pod="openshift-marketplace/certified-operators-zdlqc" Sep 30 11:21:36 crc kubenswrapper[4730]: I0930 11:21:36.932465 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-zdlqc" Sep 30 11:21:37 crc kubenswrapper[4730]: I0930 11:21:37.486777 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-zdlqc"] Sep 30 11:21:37 crc kubenswrapper[4730]: I0930 11:21:37.792187 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zdlqc" event={"ID":"a621e0c4-5687-4e20-9c31-11b5ee23f644","Type":"ContainerStarted","Data":"de4533d67d3a371b66591f2e8d0011ea75139242a03f018082eaf28a23d04b4e"} Sep 30 11:21:37 crc kubenswrapper[4730]: I0930 11:21:37.792229 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zdlqc" event={"ID":"a621e0c4-5687-4e20-9c31-11b5ee23f644","Type":"ContainerStarted","Data":"65e127ff3211ed2b919354f2155540d9fbe6495701a5329fb64b5e9db1426aa1"} Sep 30 11:21:38 crc kubenswrapper[4730]: I0930 11:21:38.806541 4730 generic.go:334] "Generic (PLEG): container finished" podID="a621e0c4-5687-4e20-9c31-11b5ee23f644" containerID="de4533d67d3a371b66591f2e8d0011ea75139242a03f018082eaf28a23d04b4e" exitCode=0 Sep 30 11:21:38 crc kubenswrapper[4730]: I0930 11:21:38.806686 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zdlqc" event={"ID":"a621e0c4-5687-4e20-9c31-11b5ee23f644","Type":"ContainerDied","Data":"de4533d67d3a371b66591f2e8d0011ea75139242a03f018082eaf28a23d04b4e"} Sep 30 11:21:38 crc kubenswrapper[4730]: I0930 11:21:38.809713 4730 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 30 11:21:43 crc kubenswrapper[4730]: I0930 11:21:43.860488 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zdlqc" 
event={"ID":"a621e0c4-5687-4e20-9c31-11b5ee23f644","Type":"ContainerStarted","Data":"bc32cb735d1793334434bd5ccc5526846d245d58c0b6888eaf52a74b2644fa48"} Sep 30 11:21:44 crc kubenswrapper[4730]: I0930 11:21:44.870841 4730 generic.go:334] "Generic (PLEG): container finished" podID="a621e0c4-5687-4e20-9c31-11b5ee23f644" containerID="bc32cb735d1793334434bd5ccc5526846d245d58c0b6888eaf52a74b2644fa48" exitCode=0 Sep 30 11:21:44 crc kubenswrapper[4730]: I0930 11:21:44.870898 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zdlqc" event={"ID":"a621e0c4-5687-4e20-9c31-11b5ee23f644","Type":"ContainerDied","Data":"bc32cb735d1793334434bd5ccc5526846d245d58c0b6888eaf52a74b2644fa48"} Sep 30 11:21:45 crc kubenswrapper[4730]: I0930 11:21:45.885745 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zdlqc" event={"ID":"a621e0c4-5687-4e20-9c31-11b5ee23f644","Type":"ContainerStarted","Data":"c4cde6fd15e42c23455679fe96cd88348472440029fcbe8fcf35f434e98d32be"} Sep 30 11:21:45 crc kubenswrapper[4730]: I0930 11:21:45.901552 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-zdlqc" podStartSLOduration=3.235222498 podStartE2EDuration="9.901533513s" podCreationTimestamp="2025-09-30 11:21:36 +0000 UTC" firstStartedPulling="2025-09-30 11:21:38.809376248 +0000 UTC m=+5543.142636241" lastFinishedPulling="2025-09-30 11:21:45.475687263 +0000 UTC m=+5549.808947256" observedRunningTime="2025-09-30 11:21:45.900499566 +0000 UTC m=+5550.233759579" watchObservedRunningTime="2025-09-30 11:21:45.901533513 +0000 UTC m=+5550.234793506" Sep 30 11:21:46 crc kubenswrapper[4730]: I0930 11:21:46.932772 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-zdlqc" Sep 30 11:21:46 crc kubenswrapper[4730]: I0930 11:21:46.932848 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-zdlqc" Sep 30 11:21:48 crc kubenswrapper[4730]: I0930 11:21:48.392554 4730 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-zdlqc" podUID="a621e0c4-5687-4e20-9c31-11b5ee23f644" containerName="registry-server" probeResult="failure" output=< Sep 30 11:21:48 crc kubenswrapper[4730]: timeout: failed to connect service ":50051" within 1s Sep 30 11:21:48 crc kubenswrapper[4730]: > Sep 30 11:21:57 crc kubenswrapper[4730]: I0930 11:21:57.006724 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-zdlqc" Sep 30 11:21:57 crc kubenswrapper[4730]: I0930 11:21:57.069234 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-zdlqc" Sep 30 11:21:57 crc kubenswrapper[4730]: I0930 11:21:57.356892 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-zdlqc"] Sep 30 11:21:57 crc kubenswrapper[4730]: I0930 11:21:57.511725 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-cgfsm"] Sep 30 11:21:57 crc kubenswrapper[4730]: I0930 11:21:57.512022 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-cgfsm" podUID="05e9d108-9923-450e-b0cb-9cf6a5bc4a54" containerName="registry-server" 
containerID="cri-o://095b9e654232b717113b6d2129415c6275da1e1499218ae0eed0528f1251702a" gracePeriod=2 Sep 30 11:21:58 crc kubenswrapper[4730]: I0930 11:21:58.003235 4730 generic.go:334] "Generic (PLEG): container finished" podID="05e9d108-9923-450e-b0cb-9cf6a5bc4a54" containerID="095b9e654232b717113b6d2129415c6275da1e1499218ae0eed0528f1251702a" exitCode=0 Sep 30 11:21:58 crc kubenswrapper[4730]: I0930 11:21:58.003301 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cgfsm" event={"ID":"05e9d108-9923-450e-b0cb-9cf6a5bc4a54","Type":"ContainerDied","Data":"095b9e654232b717113b6d2129415c6275da1e1499218ae0eed0528f1251702a"} Sep 30 11:21:58 crc kubenswrapper[4730]: I0930 11:21:58.644690 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-cgfsm" Sep 30 11:21:58 crc kubenswrapper[4730]: I0930 11:21:58.751822 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rvstj\" (UniqueName: \"kubernetes.io/projected/05e9d108-9923-450e-b0cb-9cf6a5bc4a54-kube-api-access-rvstj\") pod \"05e9d108-9923-450e-b0cb-9cf6a5bc4a54\" (UID: \"05e9d108-9923-450e-b0cb-9cf6a5bc4a54\") " Sep 30 11:21:58 crc kubenswrapper[4730]: I0930 11:21:58.751947 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/05e9d108-9923-450e-b0cb-9cf6a5bc4a54-utilities\") pod \"05e9d108-9923-450e-b0cb-9cf6a5bc4a54\" (UID: \"05e9d108-9923-450e-b0cb-9cf6a5bc4a54\") " Sep 30 11:21:58 crc kubenswrapper[4730]: I0930 11:21:58.751993 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/05e9d108-9923-450e-b0cb-9cf6a5bc4a54-catalog-content\") pod \"05e9d108-9923-450e-b0cb-9cf6a5bc4a54\" (UID: \"05e9d108-9923-450e-b0cb-9cf6a5bc4a54\") " Sep 30 11:21:58 crc kubenswrapper[4730]: I0930 11:21:58.753935 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/05e9d108-9923-450e-b0cb-9cf6a5bc4a54-utilities" (OuterVolumeSpecName: "utilities") pod "05e9d108-9923-450e-b0cb-9cf6a5bc4a54" (UID: "05e9d108-9923-450e-b0cb-9cf6a5bc4a54"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 11:21:58 crc kubenswrapper[4730]: I0930 11:21:58.763649 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/05e9d108-9923-450e-b0cb-9cf6a5bc4a54-kube-api-access-rvstj" (OuterVolumeSpecName: "kube-api-access-rvstj") pod "05e9d108-9923-450e-b0cb-9cf6a5bc4a54" (UID: "05e9d108-9923-450e-b0cb-9cf6a5bc4a54"). InnerVolumeSpecName "kube-api-access-rvstj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 11:21:58 crc kubenswrapper[4730]: I0930 11:21:58.854292 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rvstj\" (UniqueName: \"kubernetes.io/projected/05e9d108-9923-450e-b0cb-9cf6a5bc4a54-kube-api-access-rvstj\") on node \"crc\" DevicePath \"\"" Sep 30 11:21:58 crc kubenswrapper[4730]: I0930 11:21:58.854591 4730 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/05e9d108-9923-450e-b0cb-9cf6a5bc4a54-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 11:21:58 crc kubenswrapper[4730]: I0930 11:21:58.858691 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/05e9d108-9923-450e-b0cb-9cf6a5bc4a54-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "05e9d108-9923-450e-b0cb-9cf6a5bc4a54" (UID: "05e9d108-9923-450e-b0cb-9cf6a5bc4a54"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 11:21:58 crc kubenswrapper[4730]: I0930 11:21:58.956869 4730 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/05e9d108-9923-450e-b0cb-9cf6a5bc4a54-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 11:21:59 crc kubenswrapper[4730]: I0930 11:21:59.015454 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cgfsm" event={"ID":"05e9d108-9923-450e-b0cb-9cf6a5bc4a54","Type":"ContainerDied","Data":"3ab7b0575d641b8f90c0a7fe93a0ff5fe7d29d9874eeb0559a2d98ecf9556ac1"} Sep 30 11:21:59 crc kubenswrapper[4730]: I0930 11:21:59.015507 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-cgfsm" Sep 30 11:21:59 crc kubenswrapper[4730]: I0930 11:21:59.015529 4730 scope.go:117] "RemoveContainer" containerID="095b9e654232b717113b6d2129415c6275da1e1499218ae0eed0528f1251702a" Sep 30 11:21:59 crc kubenswrapper[4730]: I0930 11:21:59.045130 4730 scope.go:117] "RemoveContainer" containerID="59af014563d2f4851a152f9f6375def1fba9ee40901e9fd3a8c3177f9ad0b898" Sep 30 11:21:59 crc kubenswrapper[4730]: I0930 11:21:59.051945 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-cgfsm"] Sep 30 11:21:59 crc kubenswrapper[4730]: I0930 11:21:59.070017 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-cgfsm"] Sep 30 11:21:59 crc kubenswrapper[4730]: I0930 11:21:59.077219 4730 scope.go:117] "RemoveContainer" containerID="3f44dcf1a0d6fd522e4b234f6291f95eb85a838316107bafb3c3a8da0eb15005" Sep 30 11:22:00 crc kubenswrapper[4730]: I0930 11:22:00.393684 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="05e9d108-9923-450e-b0cb-9cf6a5bc4a54" path="/var/lib/kubelet/pods/05e9d108-9923-450e-b0cb-9cf6a5bc4a54/volumes" Sep 30 11:22:02 crc kubenswrapper[4730]: I0930 11:22:02.337218 4730 patch_prober.go:28] interesting pod/machine-config-daemon-d4zf9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 11:22:02 crc kubenswrapper[4730]: I0930 11:22:02.337495 4730 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" 
containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 11:22:32 crc kubenswrapper[4730]: I0930 11:22:32.337230 4730 patch_prober.go:28] interesting pod/machine-config-daemon-d4zf9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 11:22:32 crc kubenswrapper[4730]: I0930 11:22:32.337742 4730 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 11:23:02 crc kubenswrapper[4730]: I0930 11:23:02.336868 4730 patch_prober.go:28] interesting pod/machine-config-daemon-d4zf9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 11:23:02 crc kubenswrapper[4730]: I0930 11:23:02.337472 4730 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 11:23:02 crc kubenswrapper[4730]: I0930 11:23:02.337533 4730 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" Sep 30 11:23:02 crc kubenswrapper[4730]: I0930 11:23:02.338559 4730 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"36d8443318fd4677f87c68f1ea9da04f7cf043d37e0f251d71cbee455d31fe35"} pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 11:23:02 crc kubenswrapper[4730]: I0930 11:23:02.338707 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerName="machine-config-daemon" containerID="cri-o://36d8443318fd4677f87c68f1ea9da04f7cf043d37e0f251d71cbee455d31fe35" gracePeriod=600 Sep 30 11:23:02 crc kubenswrapper[4730]: E0930 11:23:02.483583 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 11:23:02 crc kubenswrapper[4730]: I0930 11:23:02.645870 4730 generic.go:334] "Generic (PLEG): container finished" podID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerID="36d8443318fd4677f87c68f1ea9da04f7cf043d37e0f251d71cbee455d31fe35" exitCode=0 Sep 30 11:23:02 crc kubenswrapper[4730]: I0930 11:23:02.645910 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" event={"ID":"95bd4436-8399-478d-9552-c9ba5ae8f327","Type":"ContainerDied","Data":"36d8443318fd4677f87c68f1ea9da04f7cf043d37e0f251d71cbee455d31fe35"} Sep 30 11:23:02 crc kubenswrapper[4730]: I0930 11:23:02.645943 4730 scope.go:117] "RemoveContainer" containerID="971ed7b5b2b9c54a39bf2857f1ba0c9a9fbe91b959a068fc5eea3cfdab95f171" Sep 30 11:23:02 crc kubenswrapper[4730]: I0930 11:23:02.646593 4730 scope.go:117] "RemoveContainer" containerID="36d8443318fd4677f87c68f1ea9da04f7cf043d37e0f251d71cbee455d31fe35" Sep 30 11:23:02 crc kubenswrapper[4730]: E0930 11:23:02.647017 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 11:23:13 crc kubenswrapper[4730]: I0930 11:23:13.380885 4730 scope.go:117] "RemoveContainer" containerID="36d8443318fd4677f87c68f1ea9da04f7cf043d37e0f251d71cbee455d31fe35" Sep 30 11:23:13 crc kubenswrapper[4730]: E0930 11:23:13.381652 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 11:23:25 crc kubenswrapper[4730]: I0930 11:23:25.380643 4730 scope.go:117] "RemoveContainer" containerID="36d8443318fd4677f87c68f1ea9da04f7cf043d37e0f251d71cbee455d31fe35" Sep 30 11:23:25 crc kubenswrapper[4730]: E0930 11:23:25.381354 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 11:23:40 crc kubenswrapper[4730]: I0930 11:23:40.382216 4730 scope.go:117] "RemoveContainer" containerID="36d8443318fd4677f87c68f1ea9da04f7cf043d37e0f251d71cbee455d31fe35" Sep 30 11:23:40 crc kubenswrapper[4730]: E0930 11:23:40.382919 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 11:23:54 crc kubenswrapper[4730]: I0930 11:23:54.381653 4730 scope.go:117] "RemoveContainer" containerID="36d8443318fd4677f87c68f1ea9da04f7cf043d37e0f251d71cbee455d31fe35" Sep 30 11:23:54 crc kubenswrapper[4730]: E0930 11:23:54.382457 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 11:24:06 crc kubenswrapper[4730]: I0930 11:24:06.393356 4730 scope.go:117] "RemoveContainer" containerID="36d8443318fd4677f87c68f1ea9da04f7cf043d37e0f251d71cbee455d31fe35" Sep 30 11:24:06 crc kubenswrapper[4730]: E0930 11:24:06.395488 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 11:24:21 crc kubenswrapper[4730]: I0930 11:24:21.381725 4730 scope.go:117] "RemoveContainer" containerID="36d8443318fd4677f87c68f1ea9da04f7cf043d37e0f251d71cbee455d31fe35" Sep 30 11:24:21 crc kubenswrapper[4730]: E0930 11:24:21.383031 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 11:24:34 crc kubenswrapper[4730]: I0930 11:24:34.383817 4730 scope.go:117] "RemoveContainer" containerID="36d8443318fd4677f87c68f1ea9da04f7cf043d37e0f251d71cbee455d31fe35" Sep 30 11:24:34 crc kubenswrapper[4730]: E0930 11:24:34.384752 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 11:24:48 crc kubenswrapper[4730]: I0930 11:24:48.381558 4730 scope.go:117] "RemoveContainer" containerID="36d8443318fd4677f87c68f1ea9da04f7cf043d37e0f251d71cbee455d31fe35" Sep 30 11:24:48 crc kubenswrapper[4730]: E0930 11:24:48.382412 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 11:25:01 crc kubenswrapper[4730]: I0930 11:25:01.382840 4730 scope.go:117] "RemoveContainer" containerID="36d8443318fd4677f87c68f1ea9da04f7cf043d37e0f251d71cbee455d31fe35" Sep 30 11:25:01 crc kubenswrapper[4730]: E0930 11:25:01.384329 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" 
podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 11:25:14 crc kubenswrapper[4730]: I0930 11:25:14.381330 4730 scope.go:117] "RemoveContainer" containerID="36d8443318fd4677f87c68f1ea9da04f7cf043d37e0f251d71cbee455d31fe35" Sep 30 11:25:14 crc kubenswrapper[4730]: E0930 11:25:14.382765 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 11:25:26 crc kubenswrapper[4730]: I0930 11:25:26.399502 4730 scope.go:117] "RemoveContainer" containerID="36d8443318fd4677f87c68f1ea9da04f7cf043d37e0f251d71cbee455d31fe35" Sep 30 11:25:26 crc kubenswrapper[4730]: E0930 11:25:26.401533 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 11:25:37 crc kubenswrapper[4730]: I0930 11:25:37.380986 4730 scope.go:117] "RemoveContainer" containerID="36d8443318fd4677f87c68f1ea9da04f7cf043d37e0f251d71cbee455d31fe35" Sep 30 11:25:37 crc kubenswrapper[4730]: E0930 11:25:37.381822 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 11:25:51 crc kubenswrapper[4730]: I0930 11:25:51.381972 4730 scope.go:117] "RemoveContainer" containerID="36d8443318fd4677f87c68f1ea9da04f7cf043d37e0f251d71cbee455d31fe35" Sep 30 11:25:51 crc kubenswrapper[4730]: E0930 11:25:51.383353 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 11:26:06 crc kubenswrapper[4730]: I0930 11:26:06.389952 4730 scope.go:117] "RemoveContainer" containerID="36d8443318fd4677f87c68f1ea9da04f7cf043d37e0f251d71cbee455d31fe35" Sep 30 11:26:06 crc kubenswrapper[4730]: E0930 11:26:06.390855 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 11:26:17 crc kubenswrapper[4730]: I0930 11:26:17.381414 4730 scope.go:117] "RemoveContainer" 
containerID="36d8443318fd4677f87c68f1ea9da04f7cf043d37e0f251d71cbee455d31fe35" Sep 30 11:26:17 crc kubenswrapper[4730]: E0930 11:26:17.382523 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 11:26:29 crc kubenswrapper[4730]: I0930 11:26:29.382717 4730 scope.go:117] "RemoveContainer" containerID="36d8443318fd4677f87c68f1ea9da04f7cf043d37e0f251d71cbee455d31fe35" Sep 30 11:26:29 crc kubenswrapper[4730]: E0930 11:26:29.383543 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 11:26:43 crc kubenswrapper[4730]: I0930 11:26:43.382191 4730 scope.go:117] "RemoveContainer" containerID="36d8443318fd4677f87c68f1ea9da04f7cf043d37e0f251d71cbee455d31fe35" Sep 30 11:26:43 crc kubenswrapper[4730]: E0930 11:26:43.383426 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 11:26:57 crc kubenswrapper[4730]: I0930 11:26:57.382244 4730 scope.go:117] "RemoveContainer" containerID="36d8443318fd4677f87c68f1ea9da04f7cf043d37e0f251d71cbee455d31fe35" Sep 30 11:26:57 crc kubenswrapper[4730]: E0930 11:26:57.384173 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 11:27:10 crc kubenswrapper[4730]: I0930 11:27:10.380860 4730 scope.go:117] "RemoveContainer" containerID="36d8443318fd4677f87c68f1ea9da04f7cf043d37e0f251d71cbee455d31fe35" Sep 30 11:27:10 crc kubenswrapper[4730]: E0930 11:27:10.381760 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 11:27:25 crc kubenswrapper[4730]: I0930 11:27:25.380779 4730 scope.go:117] "RemoveContainer" containerID="36d8443318fd4677f87c68f1ea9da04f7cf043d37e0f251d71cbee455d31fe35" Sep 30 11:27:25 crc kubenswrapper[4730]: E0930 11:27:25.381628 4730 pod_workers.go:1301] "Error syncing pod, 
skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 11:27:40 crc kubenswrapper[4730]: I0930 11:27:40.380666 4730 scope.go:117] "RemoveContainer" containerID="36d8443318fd4677f87c68f1ea9da04f7cf043d37e0f251d71cbee455d31fe35" Sep 30 11:27:40 crc kubenswrapper[4730]: E0930 11:27:40.381513 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 11:27:53 crc kubenswrapper[4730]: I0930 11:27:53.383001 4730 scope.go:117] "RemoveContainer" containerID="36d8443318fd4677f87c68f1ea9da04f7cf043d37e0f251d71cbee455d31fe35" Sep 30 11:27:53 crc kubenswrapper[4730]: E0930 11:27:53.384092 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 11:28:08 crc kubenswrapper[4730]: I0930 11:28:08.380666 4730 scope.go:117] "RemoveContainer" containerID="36d8443318fd4677f87c68f1ea9da04f7cf043d37e0f251d71cbee455d31fe35" Sep 30 11:28:08 crc kubenswrapper[4730]: I0930 11:28:08.753780 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" event={"ID":"95bd4436-8399-478d-9552-c9ba5ae8f327","Type":"ContainerStarted","Data":"949efe096ebda614ec0ed66c9871e458cfa45addba67825624556af6913f3633"} Sep 30 11:28:14 crc kubenswrapper[4730]: I0930 11:28:14.822943 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-k4tfm"] Sep 30 11:28:14 crc kubenswrapper[4730]: E0930 11:28:14.824038 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="05e9d108-9923-450e-b0cb-9cf6a5bc4a54" containerName="registry-server" Sep 30 11:28:14 crc kubenswrapper[4730]: I0930 11:28:14.824056 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="05e9d108-9923-450e-b0cb-9cf6a5bc4a54" containerName="registry-server" Sep 30 11:28:14 crc kubenswrapper[4730]: E0930 11:28:14.824084 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="05e9d108-9923-450e-b0cb-9cf6a5bc4a54" containerName="extract-utilities" Sep 30 11:28:14 crc kubenswrapper[4730]: I0930 11:28:14.824092 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="05e9d108-9923-450e-b0cb-9cf6a5bc4a54" containerName="extract-utilities" Sep 30 11:28:14 crc kubenswrapper[4730]: E0930 11:28:14.824106 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="05e9d108-9923-450e-b0cb-9cf6a5bc4a54" containerName="extract-content" Sep 30 11:28:14 crc kubenswrapper[4730]: I0930 11:28:14.824114 4730 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="05e9d108-9923-450e-b0cb-9cf6a5bc4a54" containerName="extract-content" Sep 30 11:28:14 crc kubenswrapper[4730]: I0930 11:28:14.824358 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="05e9d108-9923-450e-b0cb-9cf6a5bc4a54" containerName="registry-server" Sep 30 11:28:14 crc kubenswrapper[4730]: I0930 11:28:14.826101 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-k4tfm" Sep 30 11:28:14 crc kubenswrapper[4730]: I0930 11:28:14.834993 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-k4tfm"] Sep 30 11:28:14 crc kubenswrapper[4730]: I0930 11:28:14.918913 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/41b27951-c0ca-41f7-bc1a-74ef582a04f1-utilities\") pod \"community-operators-k4tfm\" (UID: \"41b27951-c0ca-41f7-bc1a-74ef582a04f1\") " pod="openshift-marketplace/community-operators-k4tfm" Sep 30 11:28:14 crc kubenswrapper[4730]: I0930 11:28:14.919084 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2fg7k\" (UniqueName: \"kubernetes.io/projected/41b27951-c0ca-41f7-bc1a-74ef582a04f1-kube-api-access-2fg7k\") pod \"community-operators-k4tfm\" (UID: \"41b27951-c0ca-41f7-bc1a-74ef582a04f1\") " pod="openshift-marketplace/community-operators-k4tfm" Sep 30 11:28:14 crc kubenswrapper[4730]: I0930 11:28:14.919157 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/41b27951-c0ca-41f7-bc1a-74ef582a04f1-catalog-content\") pod \"community-operators-k4tfm\" (UID: \"41b27951-c0ca-41f7-bc1a-74ef582a04f1\") " pod="openshift-marketplace/community-operators-k4tfm" Sep 30 11:28:15 crc kubenswrapper[4730]: I0930 11:28:15.021302 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2fg7k\" (UniqueName: \"kubernetes.io/projected/41b27951-c0ca-41f7-bc1a-74ef582a04f1-kube-api-access-2fg7k\") pod \"community-operators-k4tfm\" (UID: \"41b27951-c0ca-41f7-bc1a-74ef582a04f1\") " pod="openshift-marketplace/community-operators-k4tfm" Sep 30 11:28:15 crc kubenswrapper[4730]: I0930 11:28:15.021403 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/41b27951-c0ca-41f7-bc1a-74ef582a04f1-catalog-content\") pod \"community-operators-k4tfm\" (UID: \"41b27951-c0ca-41f7-bc1a-74ef582a04f1\") " pod="openshift-marketplace/community-operators-k4tfm" Sep 30 11:28:15 crc kubenswrapper[4730]: I0930 11:28:15.021524 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/41b27951-c0ca-41f7-bc1a-74ef582a04f1-utilities\") pod \"community-operators-k4tfm\" (UID: \"41b27951-c0ca-41f7-bc1a-74ef582a04f1\") " pod="openshift-marketplace/community-operators-k4tfm" Sep 30 11:28:15 crc kubenswrapper[4730]: I0930 11:28:15.022163 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/41b27951-c0ca-41f7-bc1a-74ef582a04f1-utilities\") pod \"community-operators-k4tfm\" (UID: \"41b27951-c0ca-41f7-bc1a-74ef582a04f1\") " pod="openshift-marketplace/community-operators-k4tfm" Sep 30 11:28:15 crc kubenswrapper[4730]: I0930 11:28:15.022292 4730 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/41b27951-c0ca-41f7-bc1a-74ef582a04f1-catalog-content\") pod \"community-operators-k4tfm\" (UID: \"41b27951-c0ca-41f7-bc1a-74ef582a04f1\") " pod="openshift-marketplace/community-operators-k4tfm" Sep 30 11:28:15 crc kubenswrapper[4730]: I0930 11:28:15.042175 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2fg7k\" (UniqueName: \"kubernetes.io/projected/41b27951-c0ca-41f7-bc1a-74ef582a04f1-kube-api-access-2fg7k\") pod \"community-operators-k4tfm\" (UID: \"41b27951-c0ca-41f7-bc1a-74ef582a04f1\") " pod="openshift-marketplace/community-operators-k4tfm" Sep 30 11:28:15 crc kubenswrapper[4730]: I0930 11:28:15.164387 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-k4tfm" Sep 30 11:28:15 crc kubenswrapper[4730]: I0930 11:28:15.732980 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-k4tfm"] Sep 30 11:28:15 crc kubenswrapper[4730]: I0930 11:28:15.820802 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-k4tfm" event={"ID":"41b27951-c0ca-41f7-bc1a-74ef582a04f1","Type":"ContainerStarted","Data":"d6e553fb9e61150b8345c83970b8b8ee3d3a87198e9d946c2640739a5f20c32d"} Sep 30 11:28:16 crc kubenswrapper[4730]: I0930 11:28:16.838197 4730 generic.go:334] "Generic (PLEG): container finished" podID="41b27951-c0ca-41f7-bc1a-74ef582a04f1" containerID="89c80487ba4eea349cf7aceb0c83c81f2242cbb959be344e5385cc46708f3b10" exitCode=0 Sep 30 11:28:16 crc kubenswrapper[4730]: I0930 11:28:16.838254 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-k4tfm" event={"ID":"41b27951-c0ca-41f7-bc1a-74ef582a04f1","Type":"ContainerDied","Data":"89c80487ba4eea349cf7aceb0c83c81f2242cbb959be344e5385cc46708f3b10"} Sep 30 11:28:16 crc kubenswrapper[4730]: I0930 11:28:16.842737 4730 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 30 11:28:18 crc kubenswrapper[4730]: I0930 11:28:18.859462 4730 generic.go:334] "Generic (PLEG): container finished" podID="41b27951-c0ca-41f7-bc1a-74ef582a04f1" containerID="5a8603550a39a3dd0ed300c5fd102e1567ccde1eeafbb328772090a583198f44" exitCode=0 Sep 30 11:28:18 crc kubenswrapper[4730]: I0930 11:28:18.859545 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-k4tfm" event={"ID":"41b27951-c0ca-41f7-bc1a-74ef582a04f1","Type":"ContainerDied","Data":"5a8603550a39a3dd0ed300c5fd102e1567ccde1eeafbb328772090a583198f44"} Sep 30 11:28:19 crc kubenswrapper[4730]: I0930 11:28:19.874703 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-k4tfm" event={"ID":"41b27951-c0ca-41f7-bc1a-74ef582a04f1","Type":"ContainerStarted","Data":"1bd0e7367b44539aedcfe96a32fd8a1fdc0ee25d8d1bb93a2af956b3583d8b8a"} Sep 30 11:28:19 crc kubenswrapper[4730]: I0930 11:28:19.897905 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-k4tfm" podStartSLOduration=3.419806865 podStartE2EDuration="5.897879925s" podCreationTimestamp="2025-09-30 11:28:14 +0000 UTC" firstStartedPulling="2025-09-30 11:28:16.842324137 +0000 UTC m=+5941.175584140" lastFinishedPulling="2025-09-30 11:28:19.320397197 +0000 UTC m=+5943.653657200" 
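[Editor's note] The startup-latency entry above is internally consistent: podStartE2EDuration (5.897879925s) is watchObservedRunningTime minus podCreationTimestamp, and podStartSLOduration (3.419806865) appears to be that figure minus the image-pull window (lastFinishedPulling - firstStartedPulling = 2.478073060s). A worked check of that arithmetic using the logged timestamps (the subtraction scheme is inferred from the values, not from kubelet source):

```go
package main

import (
	"fmt"
	"time"
)

func main() {
	const layout = "2006-01-02 15:04:05" // time.Parse accepts the fractional seconds in the input
	parse := func(s string) time.Time {
		t, err := time.Parse(layout, s)
		if err != nil {
			panic(err)
		}
		return t
	}
	created := parse("2025-09-30 11:28:14")
	firstPull := parse("2025-09-30 11:28:16.842324137")
	lastPull := parse("2025-09-30 11:28:19.320397197")
	running := parse("2025-09-30 11:28:19.897879925") // watchObservedRunningTime

	e2e := running.Sub(created)     // 5.897879925s = podStartE2EDuration
	pull := lastPull.Sub(firstPull) // 2.478073060s spent pulling the image
	fmt.Println(e2e, pull, e2e-pull) // e2e-pull = 3.419806865s = podStartSLOduration
}
```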
observedRunningTime="2025-09-30 11:28:19.891946478 +0000 UTC m=+5944.225206501" watchObservedRunningTime="2025-09-30 11:28:19.897879925 +0000 UTC m=+5944.231139938" Sep 30 11:28:25 crc kubenswrapper[4730]: I0930 11:28:25.165424 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-k4tfm" Sep 30 11:28:25 crc kubenswrapper[4730]: I0930 11:28:25.166317 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-k4tfm" Sep 30 11:28:25 crc kubenswrapper[4730]: I0930 11:28:25.230796 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-k4tfm" Sep 30 11:28:26 crc kubenswrapper[4730]: I0930 11:28:26.005814 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-k4tfm" Sep 30 11:28:26 crc kubenswrapper[4730]: I0930 11:28:26.059131 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-k4tfm"] Sep 30 11:28:27 crc kubenswrapper[4730]: I0930 11:28:27.957959 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-k4tfm" podUID="41b27951-c0ca-41f7-bc1a-74ef582a04f1" containerName="registry-server" containerID="cri-o://1bd0e7367b44539aedcfe96a32fd8a1fdc0ee25d8d1bb93a2af956b3583d8b8a" gracePeriod=2 Sep 30 11:28:28 crc kubenswrapper[4730]: I0930 11:28:28.399089 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-k4tfm" Sep 30 11:28:28 crc kubenswrapper[4730]: I0930 11:28:28.510030 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/41b27951-c0ca-41f7-bc1a-74ef582a04f1-utilities\") pod \"41b27951-c0ca-41f7-bc1a-74ef582a04f1\" (UID: \"41b27951-c0ca-41f7-bc1a-74ef582a04f1\") " Sep 30 11:28:28 crc kubenswrapper[4730]: I0930 11:28:28.510169 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/41b27951-c0ca-41f7-bc1a-74ef582a04f1-catalog-content\") pod \"41b27951-c0ca-41f7-bc1a-74ef582a04f1\" (UID: \"41b27951-c0ca-41f7-bc1a-74ef582a04f1\") " Sep 30 11:28:28 crc kubenswrapper[4730]: I0930 11:28:28.510212 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2fg7k\" (UniqueName: \"kubernetes.io/projected/41b27951-c0ca-41f7-bc1a-74ef582a04f1-kube-api-access-2fg7k\") pod \"41b27951-c0ca-41f7-bc1a-74ef582a04f1\" (UID: \"41b27951-c0ca-41f7-bc1a-74ef582a04f1\") " Sep 30 11:28:28 crc kubenswrapper[4730]: I0930 11:28:28.510979 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/41b27951-c0ca-41f7-bc1a-74ef582a04f1-utilities" (OuterVolumeSpecName: "utilities") pod "41b27951-c0ca-41f7-bc1a-74ef582a04f1" (UID: "41b27951-c0ca-41f7-bc1a-74ef582a04f1"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 11:28:28 crc kubenswrapper[4730]: I0930 11:28:28.517962 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/41b27951-c0ca-41f7-bc1a-74ef582a04f1-kube-api-access-2fg7k" (OuterVolumeSpecName: "kube-api-access-2fg7k") pod "41b27951-c0ca-41f7-bc1a-74ef582a04f1" (UID: "41b27951-c0ca-41f7-bc1a-74ef582a04f1"). 
InnerVolumeSpecName "kube-api-access-2fg7k". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 11:28:28 crc kubenswrapper[4730]: I0930 11:28:28.612601 4730 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/41b27951-c0ca-41f7-bc1a-74ef582a04f1-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 11:28:28 crc kubenswrapper[4730]: I0930 11:28:28.612651 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2fg7k\" (UniqueName: \"kubernetes.io/projected/41b27951-c0ca-41f7-bc1a-74ef582a04f1-kube-api-access-2fg7k\") on node \"crc\" DevicePath \"\"" Sep 30 11:28:28 crc kubenswrapper[4730]: I0930 11:28:28.968939 4730 generic.go:334] "Generic (PLEG): container finished" podID="41b27951-c0ca-41f7-bc1a-74ef582a04f1" containerID="1bd0e7367b44539aedcfe96a32fd8a1fdc0ee25d8d1bb93a2af956b3583d8b8a" exitCode=0 Sep 30 11:28:28 crc kubenswrapper[4730]: I0930 11:28:28.968987 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-k4tfm" event={"ID":"41b27951-c0ca-41f7-bc1a-74ef582a04f1","Type":"ContainerDied","Data":"1bd0e7367b44539aedcfe96a32fd8a1fdc0ee25d8d1bb93a2af956b3583d8b8a"} Sep 30 11:28:28 crc kubenswrapper[4730]: I0930 11:28:28.969056 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-k4tfm" event={"ID":"41b27951-c0ca-41f7-bc1a-74ef582a04f1","Type":"ContainerDied","Data":"d6e553fb9e61150b8345c83970b8b8ee3d3a87198e9d946c2640739a5f20c32d"} Sep 30 11:28:28 crc kubenswrapper[4730]: I0930 11:28:28.969061 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-k4tfm" Sep 30 11:28:28 crc kubenswrapper[4730]: I0930 11:28:28.969084 4730 scope.go:117] "RemoveContainer" containerID="1bd0e7367b44539aedcfe96a32fd8a1fdc0ee25d8d1bb93a2af956b3583d8b8a" Sep 30 11:28:28 crc kubenswrapper[4730]: I0930 11:28:28.990751 4730 scope.go:117] "RemoveContainer" containerID="5a8603550a39a3dd0ed300c5fd102e1567ccde1eeafbb328772090a583198f44" Sep 30 11:28:29 crc kubenswrapper[4730]: I0930 11:28:29.014680 4730 scope.go:117] "RemoveContainer" containerID="89c80487ba4eea349cf7aceb0c83c81f2242cbb959be344e5385cc46708f3b10" Sep 30 11:28:29 crc kubenswrapper[4730]: I0930 11:28:29.082599 4730 scope.go:117] "RemoveContainer" containerID="1bd0e7367b44539aedcfe96a32fd8a1fdc0ee25d8d1bb93a2af956b3583d8b8a" Sep 30 11:28:29 crc kubenswrapper[4730]: E0930 11:28:29.083443 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1bd0e7367b44539aedcfe96a32fd8a1fdc0ee25d8d1bb93a2af956b3583d8b8a\": container with ID starting with 1bd0e7367b44539aedcfe96a32fd8a1fdc0ee25d8d1bb93a2af956b3583d8b8a not found: ID does not exist" containerID="1bd0e7367b44539aedcfe96a32fd8a1fdc0ee25d8d1bb93a2af956b3583d8b8a" Sep 30 11:28:29 crc kubenswrapper[4730]: I0930 11:28:29.083545 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1bd0e7367b44539aedcfe96a32fd8a1fdc0ee25d8d1bb93a2af956b3583d8b8a"} err="failed to get container status \"1bd0e7367b44539aedcfe96a32fd8a1fdc0ee25d8d1bb93a2af956b3583d8b8a\": rpc error: code = NotFound desc = could not find container \"1bd0e7367b44539aedcfe96a32fd8a1fdc0ee25d8d1bb93a2af956b3583d8b8a\": container with ID starting with 1bd0e7367b44539aedcfe96a32fd8a1fdc0ee25d8d1bb93a2af956b3583d8b8a not found: ID does not exist" Sep 30 11:28:29 crc kubenswrapper[4730]: 
I0930 11:28:29.083587 4730 scope.go:117] "RemoveContainer" containerID="5a8603550a39a3dd0ed300c5fd102e1567ccde1eeafbb328772090a583198f44" Sep 30 11:28:29 crc kubenswrapper[4730]: E0930 11:28:29.084099 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5a8603550a39a3dd0ed300c5fd102e1567ccde1eeafbb328772090a583198f44\": container with ID starting with 5a8603550a39a3dd0ed300c5fd102e1567ccde1eeafbb328772090a583198f44 not found: ID does not exist" containerID="5a8603550a39a3dd0ed300c5fd102e1567ccde1eeafbb328772090a583198f44" Sep 30 11:28:29 crc kubenswrapper[4730]: I0930 11:28:29.084135 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5a8603550a39a3dd0ed300c5fd102e1567ccde1eeafbb328772090a583198f44"} err="failed to get container status \"5a8603550a39a3dd0ed300c5fd102e1567ccde1eeafbb328772090a583198f44\": rpc error: code = NotFound desc = could not find container \"5a8603550a39a3dd0ed300c5fd102e1567ccde1eeafbb328772090a583198f44\": container with ID starting with 5a8603550a39a3dd0ed300c5fd102e1567ccde1eeafbb328772090a583198f44 not found: ID does not exist" Sep 30 11:28:29 crc kubenswrapper[4730]: I0930 11:28:29.084157 4730 scope.go:117] "RemoveContainer" containerID="89c80487ba4eea349cf7aceb0c83c81f2242cbb959be344e5385cc46708f3b10" Sep 30 11:28:29 crc kubenswrapper[4730]: E0930 11:28:29.084653 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"89c80487ba4eea349cf7aceb0c83c81f2242cbb959be344e5385cc46708f3b10\": container with ID starting with 89c80487ba4eea349cf7aceb0c83c81f2242cbb959be344e5385cc46708f3b10 not found: ID does not exist" containerID="89c80487ba4eea349cf7aceb0c83c81f2242cbb959be344e5385cc46708f3b10" Sep 30 11:28:29 crc kubenswrapper[4730]: I0930 11:28:29.084730 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"89c80487ba4eea349cf7aceb0c83c81f2242cbb959be344e5385cc46708f3b10"} err="failed to get container status \"89c80487ba4eea349cf7aceb0c83c81f2242cbb959be344e5385cc46708f3b10\": rpc error: code = NotFound desc = could not find container \"89c80487ba4eea349cf7aceb0c83c81f2242cbb959be344e5385cc46708f3b10\": container with ID starting with 89c80487ba4eea349cf7aceb0c83c81f2242cbb959be344e5385cc46708f3b10 not found: ID does not exist" Sep 30 11:28:29 crc kubenswrapper[4730]: I0930 11:28:29.140812 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/41b27951-c0ca-41f7-bc1a-74ef582a04f1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "41b27951-c0ca-41f7-bc1a-74ef582a04f1" (UID: "41b27951-c0ca-41f7-bc1a-74ef582a04f1"). InnerVolumeSpecName "catalog-content". 
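[Editor's note] The three "ContainerStatus from runtime service failed ... NotFound" errors above are benign: the containers were already removed by the first RemoveContainer pass, so the second pass finds nothing and the deletion is effectively idempotent. A minimal sketch of that treat-NotFound-as-success pattern using gRPC status codes (the helper name and callback are hypothetical, not kubelet's API):

```go
package main

import (
	"fmt"

	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"
)

// removeIfPresent treats a NotFound from the CRI runtime as success, mirroring
// the situation in the log where a second RemoveContainer for the same ID
// finds the container already gone.
func removeIfPresent(remove func(id string) error, id string) error {
	err := remove(id)
	if status.Code(err) == codes.NotFound {
		return nil // already removed; nothing to do
	}
	return err // nil on success, or a real error
}

func main() {
	gone := func(id string) error {
		return status.Errorf(codes.NotFound, "could not find container %q", id)
	}
	fmt.Println(removeIfPresent(gone, "1bd0e7367b44539aedcfe96a32fd8a1fdc0ee25d8d1bb93a2af956b3583d8b8a")) // <nil>
}
```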
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 11:28:29 crc kubenswrapper[4730]: I0930 11:28:29.224061 4730 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/41b27951-c0ca-41f7-bc1a-74ef582a04f1-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 11:28:29 crc kubenswrapper[4730]: I0930 11:28:29.319773 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-k4tfm"] Sep 30 11:28:29 crc kubenswrapper[4730]: I0930 11:28:29.331588 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-k4tfm"] Sep 30 11:28:30 crc kubenswrapper[4730]: I0930 11:28:30.401307 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="41b27951-c0ca-41f7-bc1a-74ef582a04f1" path="/var/lib/kubelet/pods/41b27951-c0ca-41f7-bc1a-74ef582a04f1/volumes" Sep 30 11:30:00 crc kubenswrapper[4730]: I0930 11:30:00.194528 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29320530-6jk96"] Sep 30 11:30:00 crc kubenswrapper[4730]: E0930 11:30:00.195556 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="41b27951-c0ca-41f7-bc1a-74ef582a04f1" containerName="registry-server" Sep 30 11:30:00 crc kubenswrapper[4730]: I0930 11:30:00.195573 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="41b27951-c0ca-41f7-bc1a-74ef582a04f1" containerName="registry-server" Sep 30 11:30:00 crc kubenswrapper[4730]: E0930 11:30:00.195621 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="41b27951-c0ca-41f7-bc1a-74ef582a04f1" containerName="extract-utilities" Sep 30 11:30:00 crc kubenswrapper[4730]: I0930 11:30:00.195630 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="41b27951-c0ca-41f7-bc1a-74ef582a04f1" containerName="extract-utilities" Sep 30 11:30:00 crc kubenswrapper[4730]: E0930 11:30:00.195639 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="41b27951-c0ca-41f7-bc1a-74ef582a04f1" containerName="extract-content" Sep 30 11:30:00 crc kubenswrapper[4730]: I0930 11:30:00.195647 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="41b27951-c0ca-41f7-bc1a-74ef582a04f1" containerName="extract-content" Sep 30 11:30:00 crc kubenswrapper[4730]: I0930 11:30:00.195905 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="41b27951-c0ca-41f7-bc1a-74ef582a04f1" containerName="registry-server" Sep 30 11:30:00 crc kubenswrapper[4730]: I0930 11:30:00.196804 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29320530-6jk96" Sep 30 11:30:00 crc kubenswrapper[4730]: I0930 11:30:00.199217 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Sep 30 11:30:00 crc kubenswrapper[4730]: I0930 11:30:00.199496 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Sep 30 11:30:00 crc kubenswrapper[4730]: I0930 11:30:00.221048 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29320530-6jk96"] Sep 30 11:30:00 crc kubenswrapper[4730]: I0930 11:30:00.296893 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bttlp\" (UniqueName: \"kubernetes.io/projected/0ef3fdbe-48cb-49ba-a3ba-3db95accf3fd-kube-api-access-bttlp\") pod \"collect-profiles-29320530-6jk96\" (UID: \"0ef3fdbe-48cb-49ba-a3ba-3db95accf3fd\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320530-6jk96" Sep 30 11:30:00 crc kubenswrapper[4730]: I0930 11:30:00.297009 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/0ef3fdbe-48cb-49ba-a3ba-3db95accf3fd-secret-volume\") pod \"collect-profiles-29320530-6jk96\" (UID: \"0ef3fdbe-48cb-49ba-a3ba-3db95accf3fd\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320530-6jk96" Sep 30 11:30:00 crc kubenswrapper[4730]: I0930 11:30:00.297033 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0ef3fdbe-48cb-49ba-a3ba-3db95accf3fd-config-volume\") pod \"collect-profiles-29320530-6jk96\" (UID: \"0ef3fdbe-48cb-49ba-a3ba-3db95accf3fd\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320530-6jk96" Sep 30 11:30:00 crc kubenswrapper[4730]: I0930 11:30:00.399163 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/0ef3fdbe-48cb-49ba-a3ba-3db95accf3fd-secret-volume\") pod \"collect-profiles-29320530-6jk96\" (UID: \"0ef3fdbe-48cb-49ba-a3ba-3db95accf3fd\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320530-6jk96" Sep 30 11:30:00 crc kubenswrapper[4730]: I0930 11:30:00.399224 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0ef3fdbe-48cb-49ba-a3ba-3db95accf3fd-config-volume\") pod \"collect-profiles-29320530-6jk96\" (UID: \"0ef3fdbe-48cb-49ba-a3ba-3db95accf3fd\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320530-6jk96" Sep 30 11:30:00 crc kubenswrapper[4730]: I0930 11:30:00.399391 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bttlp\" (UniqueName: \"kubernetes.io/projected/0ef3fdbe-48cb-49ba-a3ba-3db95accf3fd-kube-api-access-bttlp\") pod \"collect-profiles-29320530-6jk96\" (UID: \"0ef3fdbe-48cb-49ba-a3ba-3db95accf3fd\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320530-6jk96" Sep 30 11:30:00 crc kubenswrapper[4730]: I0930 11:30:00.400344 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0ef3fdbe-48cb-49ba-a3ba-3db95accf3fd-config-volume\") pod 
\"collect-profiles-29320530-6jk96\" (UID: \"0ef3fdbe-48cb-49ba-a3ba-3db95accf3fd\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320530-6jk96" Sep 30 11:30:00 crc kubenswrapper[4730]: I0930 11:30:00.407892 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/0ef3fdbe-48cb-49ba-a3ba-3db95accf3fd-secret-volume\") pod \"collect-profiles-29320530-6jk96\" (UID: \"0ef3fdbe-48cb-49ba-a3ba-3db95accf3fd\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320530-6jk96" Sep 30 11:30:00 crc kubenswrapper[4730]: I0930 11:30:00.418149 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bttlp\" (UniqueName: \"kubernetes.io/projected/0ef3fdbe-48cb-49ba-a3ba-3db95accf3fd-kube-api-access-bttlp\") pod \"collect-profiles-29320530-6jk96\" (UID: \"0ef3fdbe-48cb-49ba-a3ba-3db95accf3fd\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320530-6jk96" Sep 30 11:30:00 crc kubenswrapper[4730]: I0930 11:30:00.525885 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29320530-6jk96" Sep 30 11:30:00 crc kubenswrapper[4730]: I0930 11:30:00.953119 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29320530-6jk96"] Sep 30 11:30:01 crc kubenswrapper[4730]: I0930 11:30:01.952936 4730 generic.go:334] "Generic (PLEG): container finished" podID="0ef3fdbe-48cb-49ba-a3ba-3db95accf3fd" containerID="ec758f053b8bbb48fa27b077b9249869022b88e2295765150294b9deb09b7e82" exitCode=0 Sep 30 11:30:01 crc kubenswrapper[4730]: I0930 11:30:01.953037 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29320530-6jk96" event={"ID":"0ef3fdbe-48cb-49ba-a3ba-3db95accf3fd","Type":"ContainerDied","Data":"ec758f053b8bbb48fa27b077b9249869022b88e2295765150294b9deb09b7e82"} Sep 30 11:30:01 crc kubenswrapper[4730]: I0930 11:30:01.954426 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29320530-6jk96" event={"ID":"0ef3fdbe-48cb-49ba-a3ba-3db95accf3fd","Type":"ContainerStarted","Data":"27950a75d65549efb0bf65c8337f8fb4bad35c84aa8f8aa5a1650ed67b1b2f5e"} Sep 30 11:30:03 crc kubenswrapper[4730]: I0930 11:30:03.393143 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29320530-6jk96" Sep 30 11:30:03 crc kubenswrapper[4730]: I0930 11:30:03.566372 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bttlp\" (UniqueName: \"kubernetes.io/projected/0ef3fdbe-48cb-49ba-a3ba-3db95accf3fd-kube-api-access-bttlp\") pod \"0ef3fdbe-48cb-49ba-a3ba-3db95accf3fd\" (UID: \"0ef3fdbe-48cb-49ba-a3ba-3db95accf3fd\") " Sep 30 11:30:03 crc kubenswrapper[4730]: I0930 11:30:03.566515 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/0ef3fdbe-48cb-49ba-a3ba-3db95accf3fd-secret-volume\") pod \"0ef3fdbe-48cb-49ba-a3ba-3db95accf3fd\" (UID: \"0ef3fdbe-48cb-49ba-a3ba-3db95accf3fd\") " Sep 30 11:30:03 crc kubenswrapper[4730]: I0930 11:30:03.566708 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0ef3fdbe-48cb-49ba-a3ba-3db95accf3fd-config-volume\") pod \"0ef3fdbe-48cb-49ba-a3ba-3db95accf3fd\" (UID: \"0ef3fdbe-48cb-49ba-a3ba-3db95accf3fd\") " Sep 30 11:30:03 crc kubenswrapper[4730]: I0930 11:30:03.568438 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0ef3fdbe-48cb-49ba-a3ba-3db95accf3fd-config-volume" (OuterVolumeSpecName: "config-volume") pod "0ef3fdbe-48cb-49ba-a3ba-3db95accf3fd" (UID: "0ef3fdbe-48cb-49ba-a3ba-3db95accf3fd"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 11:30:03 crc kubenswrapper[4730]: I0930 11:30:03.574475 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0ef3fdbe-48cb-49ba-a3ba-3db95accf3fd-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "0ef3fdbe-48cb-49ba-a3ba-3db95accf3fd" (UID: "0ef3fdbe-48cb-49ba-a3ba-3db95accf3fd"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 11:30:03 crc kubenswrapper[4730]: I0930 11:30:03.574913 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0ef3fdbe-48cb-49ba-a3ba-3db95accf3fd-kube-api-access-bttlp" (OuterVolumeSpecName: "kube-api-access-bttlp") pod "0ef3fdbe-48cb-49ba-a3ba-3db95accf3fd" (UID: "0ef3fdbe-48cb-49ba-a3ba-3db95accf3fd"). InnerVolumeSpecName "kube-api-access-bttlp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 11:30:03 crc kubenswrapper[4730]: I0930 11:30:03.669723 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bttlp\" (UniqueName: \"kubernetes.io/projected/0ef3fdbe-48cb-49ba-a3ba-3db95accf3fd-kube-api-access-bttlp\") on node \"crc\" DevicePath \"\"" Sep 30 11:30:03 crc kubenswrapper[4730]: I0930 11:30:03.669773 4730 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/0ef3fdbe-48cb-49ba-a3ba-3db95accf3fd-secret-volume\") on node \"crc\" DevicePath \"\"" Sep 30 11:30:03 crc kubenswrapper[4730]: I0930 11:30:03.669792 4730 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0ef3fdbe-48cb-49ba-a3ba-3db95accf3fd-config-volume\") on node \"crc\" DevicePath \"\"" Sep 30 11:30:03 crc kubenswrapper[4730]: I0930 11:30:03.980388 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29320530-6jk96" event={"ID":"0ef3fdbe-48cb-49ba-a3ba-3db95accf3fd","Type":"ContainerDied","Data":"27950a75d65549efb0bf65c8337f8fb4bad35c84aa8f8aa5a1650ed67b1b2f5e"} Sep 30 11:30:03 crc kubenswrapper[4730]: I0930 11:30:03.980750 4730 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="27950a75d65549efb0bf65c8337f8fb4bad35c84aa8f8aa5a1650ed67b1b2f5e" Sep 30 11:30:03 crc kubenswrapper[4730]: I0930 11:30:03.980563 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29320530-6jk96" Sep 30 11:30:04 crc kubenswrapper[4730]: I0930 11:30:04.478172 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29320485-nlf74"] Sep 30 11:30:04 crc kubenswrapper[4730]: I0930 11:30:04.488354 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29320485-nlf74"] Sep 30 11:30:06 crc kubenswrapper[4730]: I0930 11:30:06.395427 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="90e7dbe7-4eee-4da1-9b22-5d290321a79b" path="/var/lib/kubelet/pods/90e7dbe7-4eee-4da1-9b22-5d290321a79b/volumes" Sep 30 11:30:30 crc kubenswrapper[4730]: I0930 11:30:30.905525 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-5gwsg"] Sep 30 11:30:30 crc kubenswrapper[4730]: E0930 11:30:30.907150 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0ef3fdbe-48cb-49ba-a3ba-3db95accf3fd" containerName="collect-profiles" Sep 30 11:30:30 crc kubenswrapper[4730]: I0930 11:30:30.907180 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="0ef3fdbe-48cb-49ba-a3ba-3db95accf3fd" containerName="collect-profiles" Sep 30 11:30:30 crc kubenswrapper[4730]: I0930 11:30:30.907664 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="0ef3fdbe-48cb-49ba-a3ba-3db95accf3fd" containerName="collect-profiles" Sep 30 11:30:30 crc kubenswrapper[4730]: I0930 11:30:30.910753 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5gwsg"
Sep 30 11:30:30 crc kubenswrapper[4730]: I0930 11:30:30.913304 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-5gwsg"]
Sep 30 11:30:31 crc kubenswrapper[4730]: I0930 11:30:31.003253 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-62w4g\" (UniqueName: \"kubernetes.io/projected/cda461b5-913d-4eb1-bcb4-4b469fccd596-kube-api-access-62w4g\") pod \"redhat-marketplace-5gwsg\" (UID: \"cda461b5-913d-4eb1-bcb4-4b469fccd596\") " pod="openshift-marketplace/redhat-marketplace-5gwsg"
Sep 30 11:30:31 crc kubenswrapper[4730]: I0930 11:30:31.003362 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cda461b5-913d-4eb1-bcb4-4b469fccd596-utilities\") pod \"redhat-marketplace-5gwsg\" (UID: \"cda461b5-913d-4eb1-bcb4-4b469fccd596\") " pod="openshift-marketplace/redhat-marketplace-5gwsg"
Sep 30 11:30:31 crc kubenswrapper[4730]: I0930 11:30:31.003415 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cda461b5-913d-4eb1-bcb4-4b469fccd596-catalog-content\") pod \"redhat-marketplace-5gwsg\" (UID: \"cda461b5-913d-4eb1-bcb4-4b469fccd596\") " pod="openshift-marketplace/redhat-marketplace-5gwsg"
Sep 30 11:30:31 crc kubenswrapper[4730]: I0930 11:30:31.105889 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-62w4g\" (UniqueName: \"kubernetes.io/projected/cda461b5-913d-4eb1-bcb4-4b469fccd596-kube-api-access-62w4g\") pod \"redhat-marketplace-5gwsg\" (UID: \"cda461b5-913d-4eb1-bcb4-4b469fccd596\") " pod="openshift-marketplace/redhat-marketplace-5gwsg"
Sep 30 11:30:31 crc kubenswrapper[4730]: I0930 11:30:31.105996 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cda461b5-913d-4eb1-bcb4-4b469fccd596-utilities\") pod \"redhat-marketplace-5gwsg\" (UID: \"cda461b5-913d-4eb1-bcb4-4b469fccd596\") " pod="openshift-marketplace/redhat-marketplace-5gwsg"
Sep 30 11:30:31 crc kubenswrapper[4730]: I0930 11:30:31.106051 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cda461b5-913d-4eb1-bcb4-4b469fccd596-catalog-content\") pod \"redhat-marketplace-5gwsg\" (UID: \"cda461b5-913d-4eb1-bcb4-4b469fccd596\") " pod="openshift-marketplace/redhat-marketplace-5gwsg"
Sep 30 11:30:31 crc kubenswrapper[4730]: I0930 11:30:31.106802 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cda461b5-913d-4eb1-bcb4-4b469fccd596-utilities\") pod \"redhat-marketplace-5gwsg\" (UID: \"cda461b5-913d-4eb1-bcb4-4b469fccd596\") " pod="openshift-marketplace/redhat-marketplace-5gwsg"
Sep 30 11:30:31 crc kubenswrapper[4730]: I0930 11:30:31.106815 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cda461b5-913d-4eb1-bcb4-4b469fccd596-catalog-content\") pod \"redhat-marketplace-5gwsg\" (UID: \"cda461b5-913d-4eb1-bcb4-4b469fccd596\") " pod="openshift-marketplace/redhat-marketplace-5gwsg"
Sep 30 11:30:31 crc kubenswrapper[4730]: I0930 11:30:31.124231 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-62w4g\" (UniqueName: \"kubernetes.io/projected/cda461b5-913d-4eb1-bcb4-4b469fccd596-kube-api-access-62w4g\") pod \"redhat-marketplace-5gwsg\" (UID: \"cda461b5-913d-4eb1-bcb4-4b469fccd596\") " pod="openshift-marketplace/redhat-marketplace-5gwsg"
Sep 30 11:30:31 crc kubenswrapper[4730]: I0930 11:30:31.246934 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5gwsg"
Sep 30 11:30:31 crc kubenswrapper[4730]: I0930 11:30:31.732465 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-5gwsg"]
Sep 30 11:30:31 crc kubenswrapper[4730]: W0930 11:30:31.740894 4730 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcda461b5_913d_4eb1_bcb4_4b469fccd596.slice/crio-872bcd72d432df0499fa04226e8ecd04be525edd0c90639e8a82b072c0caf851 WatchSource:0}: Error finding container 872bcd72d432df0499fa04226e8ecd04be525edd0c90639e8a82b072c0caf851: Status 404 returned error can't find the container with id 872bcd72d432df0499fa04226e8ecd04be525edd0c90639e8a82b072c0caf851
Sep 30 11:30:32 crc kubenswrapper[4730]: I0930 11:30:32.270519 4730 generic.go:334] "Generic (PLEG): container finished" podID="cda461b5-913d-4eb1-bcb4-4b469fccd596" containerID="d5a6d51bed6fd4aa62fe6e08eb236110f5927d9d124bbd977adb645852a908d9" exitCode=0
Sep 30 11:30:32 crc kubenswrapper[4730]: I0930 11:30:32.270741 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5gwsg" event={"ID":"cda461b5-913d-4eb1-bcb4-4b469fccd596","Type":"ContainerDied","Data":"d5a6d51bed6fd4aa62fe6e08eb236110f5927d9d124bbd977adb645852a908d9"}
Sep 30 11:30:32 crc kubenswrapper[4730]: I0930 11:30:32.270941 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5gwsg" event={"ID":"cda461b5-913d-4eb1-bcb4-4b469fccd596","Type":"ContainerStarted","Data":"872bcd72d432df0499fa04226e8ecd04be525edd0c90639e8a82b072c0caf851"}
Sep 30 11:30:32 crc kubenswrapper[4730]: I0930 11:30:32.336584 4730 patch_prober.go:28] interesting pod/machine-config-daemon-d4zf9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 30 11:30:32 crc kubenswrapper[4730]: I0930 11:30:32.336657 4730 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 30 11:30:33 crc kubenswrapper[4730]: I0930 11:30:33.157565 4730 scope.go:117] "RemoveContainer" containerID="8697daaa29d53d4fb75aa2dd164dc9f933e98e2f64431bdbdaa620bb44691672"
Sep 30 11:30:34 crc kubenswrapper[4730]: I0930 11:30:34.295469 4730 generic.go:334] "Generic (PLEG): container finished" podID="cda461b5-913d-4eb1-bcb4-4b469fccd596" containerID="d1d7b7db803e33a72cfa4d7adbb18a6057ff6a5fd0e451c407a47b0ae49b31a9" exitCode=0
Sep 30 11:30:34 crc kubenswrapper[4730]: I0930 11:30:34.295532 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5gwsg" event={"ID":"cda461b5-913d-4eb1-bcb4-4b469fccd596","Type":"ContainerDied","Data":"d1d7b7db803e33a72cfa4d7adbb18a6057ff6a5fd0e451c407a47b0ae49b31a9"}
Sep 30 11:30:35 crc kubenswrapper[4730]: I0930 11:30:35.312044 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5gwsg" event={"ID":"cda461b5-913d-4eb1-bcb4-4b469fccd596","Type":"ContainerStarted","Data":"75f4026abab7a71b43eeebf90ab749b9409fcc004933e1fd61e984e658264a9e"}
Sep 30 11:30:35 crc kubenswrapper[4730]: I0930 11:30:35.341228 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-5gwsg" podStartSLOduration=2.846803983 podStartE2EDuration="5.341205872s" podCreationTimestamp="2025-09-30 11:30:30 +0000 UTC" firstStartedPulling="2025-09-30 11:30:32.275280531 +0000 UTC m=+6076.608540575" lastFinishedPulling="2025-09-30 11:30:34.769682461 +0000 UTC m=+6079.102942464" observedRunningTime="2025-09-30 11:30:35.330353166 +0000 UTC m=+6079.663613159" watchObservedRunningTime="2025-09-30 11:30:35.341205872 +0000 UTC m=+6079.674465875"
Sep 30 11:30:41 crc kubenswrapper[4730]: I0930 11:30:41.247332 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-5gwsg"
Sep 30 11:30:41 crc kubenswrapper[4730]: I0930 11:30:41.248090 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-5gwsg"
Sep 30 11:30:41 crc kubenswrapper[4730]: I0930 11:30:41.305985 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-5gwsg"
Sep 30 11:30:41 crc kubenswrapper[4730]: I0930 11:30:41.419733 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-5gwsg"
Sep 30 11:30:41 crc kubenswrapper[4730]: I0930 11:30:41.546363 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-5gwsg"]
Sep 30 11:30:43 crc kubenswrapper[4730]: I0930 11:30:43.393959 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-5gwsg" podUID="cda461b5-913d-4eb1-bcb4-4b469fccd596" containerName="registry-server" containerID="cri-o://75f4026abab7a71b43eeebf90ab749b9409fcc004933e1fd61e984e658264a9e" gracePeriod=2
Sep 30 11:30:43 crc kubenswrapper[4730]: I0930 11:30:43.880601 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5gwsg"
Sep 30 11:30:43 crc kubenswrapper[4730]: I0930 11:30:43.976346 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-62w4g\" (UniqueName: \"kubernetes.io/projected/cda461b5-913d-4eb1-bcb4-4b469fccd596-kube-api-access-62w4g\") pod \"cda461b5-913d-4eb1-bcb4-4b469fccd596\" (UID: \"cda461b5-913d-4eb1-bcb4-4b469fccd596\") "
Sep 30 11:30:43 crc kubenswrapper[4730]: I0930 11:30:43.976563 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cda461b5-913d-4eb1-bcb4-4b469fccd596-catalog-content\") pod \"cda461b5-913d-4eb1-bcb4-4b469fccd596\" (UID: \"cda461b5-913d-4eb1-bcb4-4b469fccd596\") "
Sep 30 11:30:43 crc kubenswrapper[4730]: I0930 11:30:43.976690 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cda461b5-913d-4eb1-bcb4-4b469fccd596-utilities\") pod \"cda461b5-913d-4eb1-bcb4-4b469fccd596\" (UID: \"cda461b5-913d-4eb1-bcb4-4b469fccd596\") "
Sep 30 11:30:43 crc kubenswrapper[4730]: I0930 11:30:43.977691 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cda461b5-913d-4eb1-bcb4-4b469fccd596-utilities" (OuterVolumeSpecName: "utilities") pod "cda461b5-913d-4eb1-bcb4-4b469fccd596" (UID: "cda461b5-913d-4eb1-bcb4-4b469fccd596"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 11:30:43 crc kubenswrapper[4730]: I0930 11:30:43.985001 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cda461b5-913d-4eb1-bcb4-4b469fccd596-kube-api-access-62w4g" (OuterVolumeSpecName: "kube-api-access-62w4g") pod "cda461b5-913d-4eb1-bcb4-4b469fccd596" (UID: "cda461b5-913d-4eb1-bcb4-4b469fccd596"). InnerVolumeSpecName "kube-api-access-62w4g". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 11:30:43 crc kubenswrapper[4730]: I0930 11:30:43.988838 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cda461b5-913d-4eb1-bcb4-4b469fccd596-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "cda461b5-913d-4eb1-bcb4-4b469fccd596" (UID: "cda461b5-913d-4eb1-bcb4-4b469fccd596"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 11:30:44 crc kubenswrapper[4730]: I0930 11:30:44.079137 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-62w4g\" (UniqueName: \"kubernetes.io/projected/cda461b5-913d-4eb1-bcb4-4b469fccd596-kube-api-access-62w4g\") on node \"crc\" DevicePath \"\""
Sep 30 11:30:44 crc kubenswrapper[4730]: I0930 11:30:44.079172 4730 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cda461b5-913d-4eb1-bcb4-4b469fccd596-catalog-content\") on node \"crc\" DevicePath \"\""
Sep 30 11:30:44 crc kubenswrapper[4730]: I0930 11:30:44.079183 4730 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cda461b5-913d-4eb1-bcb4-4b469fccd596-utilities\") on node \"crc\" DevicePath \"\""
Sep 30 11:30:44 crc kubenswrapper[4730]: I0930 11:30:44.409539 4730 generic.go:334] "Generic (PLEG): container finished" podID="cda461b5-913d-4eb1-bcb4-4b469fccd596" containerID="75f4026abab7a71b43eeebf90ab749b9409fcc004933e1fd61e984e658264a9e" exitCode=0
Sep 30 11:30:44 crc kubenswrapper[4730]: I0930 11:30:44.409582 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5gwsg" event={"ID":"cda461b5-913d-4eb1-bcb4-4b469fccd596","Type":"ContainerDied","Data":"75f4026abab7a71b43eeebf90ab749b9409fcc004933e1fd61e984e658264a9e"}
Sep 30 11:30:44 crc kubenswrapper[4730]: I0930 11:30:44.409637 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5gwsg" event={"ID":"cda461b5-913d-4eb1-bcb4-4b469fccd596","Type":"ContainerDied","Data":"872bcd72d432df0499fa04226e8ecd04be525edd0c90639e8a82b072c0caf851"}
Sep 30 11:30:44 crc kubenswrapper[4730]: I0930 11:30:44.409594 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5gwsg"
Sep 30 11:30:44 crc kubenswrapper[4730]: I0930 11:30:44.409658 4730 scope.go:117] "RemoveContainer" containerID="75f4026abab7a71b43eeebf90ab749b9409fcc004933e1fd61e984e658264a9e"
Sep 30 11:30:44 crc kubenswrapper[4730]: I0930 11:30:44.450976 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-5gwsg"]
Sep 30 11:30:44 crc kubenswrapper[4730]: I0930 11:30:44.452867 4730 scope.go:117] "RemoveContainer" containerID="d1d7b7db803e33a72cfa4d7adbb18a6057ff6a5fd0e451c407a47b0ae49b31a9"
Sep 30 11:30:44 crc kubenswrapper[4730]: I0930 11:30:44.459632 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-5gwsg"]
Sep 30 11:30:44 crc kubenswrapper[4730]: I0930 11:30:44.480246 4730 scope.go:117] "RemoveContainer" containerID="d5a6d51bed6fd4aa62fe6e08eb236110f5927d9d124bbd977adb645852a908d9"
Sep 30 11:30:44 crc kubenswrapper[4730]: I0930 11:30:44.516219 4730 scope.go:117] "RemoveContainer" containerID="75f4026abab7a71b43eeebf90ab749b9409fcc004933e1fd61e984e658264a9e"
Sep 30 11:30:44 crc kubenswrapper[4730]: E0930 11:30:44.516593 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"75f4026abab7a71b43eeebf90ab749b9409fcc004933e1fd61e984e658264a9e\": container with ID starting with 75f4026abab7a71b43eeebf90ab749b9409fcc004933e1fd61e984e658264a9e not found: ID does not exist" containerID="75f4026abab7a71b43eeebf90ab749b9409fcc004933e1fd61e984e658264a9e"
Sep 30 11:30:44 crc kubenswrapper[4730]: I0930 11:30:44.516655 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"75f4026abab7a71b43eeebf90ab749b9409fcc004933e1fd61e984e658264a9e"} err="failed to get container status \"75f4026abab7a71b43eeebf90ab749b9409fcc004933e1fd61e984e658264a9e\": rpc error: code = NotFound desc = could not find container \"75f4026abab7a71b43eeebf90ab749b9409fcc004933e1fd61e984e658264a9e\": container with ID starting with 75f4026abab7a71b43eeebf90ab749b9409fcc004933e1fd61e984e658264a9e not found: ID does not exist"
Sep 30 11:30:44 crc kubenswrapper[4730]: I0930 11:30:44.516684 4730 scope.go:117] "RemoveContainer" containerID="d1d7b7db803e33a72cfa4d7adbb18a6057ff6a5fd0e451c407a47b0ae49b31a9"
Sep 30 11:30:44 crc kubenswrapper[4730]: E0930 11:30:44.517041 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d1d7b7db803e33a72cfa4d7adbb18a6057ff6a5fd0e451c407a47b0ae49b31a9\": container with ID starting with d1d7b7db803e33a72cfa4d7adbb18a6057ff6a5fd0e451c407a47b0ae49b31a9 not found: ID does not exist" containerID="d1d7b7db803e33a72cfa4d7adbb18a6057ff6a5fd0e451c407a47b0ae49b31a9"
Sep 30 11:30:44 crc kubenswrapper[4730]: I0930 11:30:44.517086 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d1d7b7db803e33a72cfa4d7adbb18a6057ff6a5fd0e451c407a47b0ae49b31a9"} err="failed to get container status \"d1d7b7db803e33a72cfa4d7adbb18a6057ff6a5fd0e451c407a47b0ae49b31a9\": rpc error: code = NotFound desc = could not find container \"d1d7b7db803e33a72cfa4d7adbb18a6057ff6a5fd0e451c407a47b0ae49b31a9\": container with ID starting with d1d7b7db803e33a72cfa4d7adbb18a6057ff6a5fd0e451c407a47b0ae49b31a9 not found: ID does not exist"
Sep 30 11:30:44 crc kubenswrapper[4730]: I0930 11:30:44.517111 4730 scope.go:117] "RemoveContainer" containerID="d5a6d51bed6fd4aa62fe6e08eb236110f5927d9d124bbd977adb645852a908d9"
Sep 30 11:30:44 crc kubenswrapper[4730]: E0930 11:30:44.517384 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d5a6d51bed6fd4aa62fe6e08eb236110f5927d9d124bbd977adb645852a908d9\": container with ID starting with d5a6d51bed6fd4aa62fe6e08eb236110f5927d9d124bbd977adb645852a908d9 not found: ID does not exist" containerID="d5a6d51bed6fd4aa62fe6e08eb236110f5927d9d124bbd977adb645852a908d9"
Sep 30 11:30:44 crc kubenswrapper[4730]: I0930 11:30:44.517410 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d5a6d51bed6fd4aa62fe6e08eb236110f5927d9d124bbd977adb645852a908d9"} err="failed to get container status \"d5a6d51bed6fd4aa62fe6e08eb236110f5927d9d124bbd977adb645852a908d9\": rpc error: code = NotFound desc = could not find container \"d5a6d51bed6fd4aa62fe6e08eb236110f5927d9d124bbd977adb645852a908d9\": container with ID starting with d5a6d51bed6fd4aa62fe6e08eb236110f5927d9d124bbd977adb645852a908d9 not found: ID does not exist"
Sep 30 11:30:46 crc kubenswrapper[4730]: I0930 11:30:46.393967 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cda461b5-913d-4eb1-bcb4-4b469fccd596" path="/var/lib/kubelet/pods/cda461b5-913d-4eb1-bcb4-4b469fccd596/volumes"
Sep 30 11:31:02 crc kubenswrapper[4730]: I0930 11:31:02.336763 4730 patch_prober.go:28] interesting pod/machine-config-daemon-d4zf9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 30 11:31:02 crc kubenswrapper[4730]: I0930 11:31:02.337468 4730 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 30 11:31:32 crc kubenswrapper[4730]: I0930 11:31:32.336918 4730 patch_prober.go:28] interesting pod/machine-config-daemon-d4zf9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 30 11:31:32 crc kubenswrapper[4730]: I0930 11:31:32.337549 4730 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 30 11:31:32 crc kubenswrapper[4730]: I0930 11:31:32.337641 4730 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9"
Sep 30 11:31:32 crc kubenswrapper[4730]: I0930 11:31:32.338664 4730 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"949efe096ebda614ec0ed66c9871e458cfa45addba67825624556af6913f3633"} pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Sep 30 11:31:32 crc kubenswrapper[4730]: I0930 11:31:32.338763 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerName="machine-config-daemon" containerID="cri-o://949efe096ebda614ec0ed66c9871e458cfa45addba67825624556af6913f3633" gracePeriod=600
Sep 30 11:31:32 crc kubenswrapper[4730]: I0930 11:31:32.975981 4730 generic.go:334] "Generic (PLEG): container finished" podID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerID="949efe096ebda614ec0ed66c9871e458cfa45addba67825624556af6913f3633" exitCode=0
Sep 30 11:31:32 crc kubenswrapper[4730]: I0930 11:31:32.976037 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" event={"ID":"95bd4436-8399-478d-9552-c9ba5ae8f327","Type":"ContainerDied","Data":"949efe096ebda614ec0ed66c9871e458cfa45addba67825624556af6913f3633"}
Sep 30 11:31:32 crc kubenswrapper[4730]: I0930 11:31:32.976662 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" event={"ID":"95bd4436-8399-478d-9552-c9ba5ae8f327","Type":"ContainerStarted","Data":"1ca4e75765a6b0ce8ace2aa5038325030430c2be92cc16d25c232bed60293e3b"}
Sep 30 11:31:32 crc kubenswrapper[4730]: I0930 11:31:32.976686 4730 scope.go:117] "RemoveContainer" containerID="36d8443318fd4677f87c68f1ea9da04f7cf043d37e0f251d71cbee455d31fe35"
Sep 30 11:31:41 crc kubenswrapper[4730]: I0930 11:31:41.666709 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-h5ckt"]
Sep 30 11:31:41 crc kubenswrapper[4730]: E0930 11:31:41.668448 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cda461b5-913d-4eb1-bcb4-4b469fccd596" containerName="extract-utilities"
Sep 30 11:31:41 crc kubenswrapper[4730]: I0930 11:31:41.668467 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="cda461b5-913d-4eb1-bcb4-4b469fccd596" containerName="extract-utilities"
Sep 30 11:31:41 crc kubenswrapper[4730]: E0930 11:31:41.668516 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cda461b5-913d-4eb1-bcb4-4b469fccd596" containerName="registry-server"
Sep 30 11:31:41 crc kubenswrapper[4730]: I0930 11:31:41.668525 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="cda461b5-913d-4eb1-bcb4-4b469fccd596" containerName="registry-server"
Sep 30 11:31:41 crc kubenswrapper[4730]: E0930 11:31:41.668540 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cda461b5-913d-4eb1-bcb4-4b469fccd596" containerName="extract-content"
Sep 30 11:31:41 crc kubenswrapper[4730]: I0930 11:31:41.668547 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="cda461b5-913d-4eb1-bcb4-4b469fccd596" containerName="extract-content"
Sep 30 11:31:41 crc kubenswrapper[4730]: I0930 11:31:41.668972 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="cda461b5-913d-4eb1-bcb4-4b469fccd596" containerName="registry-server"
Sep 30 11:31:41 crc kubenswrapper[4730]: I0930 11:31:41.671474 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-h5ckt"
Sep 30 11:31:41 crc kubenswrapper[4730]: I0930 11:31:41.716077 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-h5ckt"]
Sep 30 11:31:41 crc kubenswrapper[4730]: I0930 11:31:41.819369 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q9xwj\" (UniqueName: \"kubernetes.io/projected/0d905d20-bacc-46e6-83c0-c7e685cf936d-kube-api-access-q9xwj\") pod \"certified-operators-h5ckt\" (UID: \"0d905d20-bacc-46e6-83c0-c7e685cf936d\") " pod="openshift-marketplace/certified-operators-h5ckt"
Sep 30 11:31:41 crc kubenswrapper[4730]: I0930 11:31:41.819416 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0d905d20-bacc-46e6-83c0-c7e685cf936d-utilities\") pod \"certified-operators-h5ckt\" (UID: \"0d905d20-bacc-46e6-83c0-c7e685cf936d\") " pod="openshift-marketplace/certified-operators-h5ckt"
Sep 30 11:31:41 crc kubenswrapper[4730]: I0930 11:31:41.819731 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0d905d20-bacc-46e6-83c0-c7e685cf936d-catalog-content\") pod \"certified-operators-h5ckt\" (UID: \"0d905d20-bacc-46e6-83c0-c7e685cf936d\") " pod="openshift-marketplace/certified-operators-h5ckt"
Sep 30 11:31:41 crc kubenswrapper[4730]: I0930 11:31:41.921809 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0d905d20-bacc-46e6-83c0-c7e685cf936d-utilities\") pod \"certified-operators-h5ckt\" (UID: \"0d905d20-bacc-46e6-83c0-c7e685cf936d\") " pod="openshift-marketplace/certified-operators-h5ckt"
Sep 30 11:31:41 crc kubenswrapper[4730]: I0930 11:31:41.921954 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0d905d20-bacc-46e6-83c0-c7e685cf936d-catalog-content\") pod \"certified-operators-h5ckt\" (UID: \"0d905d20-bacc-46e6-83c0-c7e685cf936d\") " pod="openshift-marketplace/certified-operators-h5ckt"
Sep 30 11:31:41 crc kubenswrapper[4730]: I0930 11:31:41.922045 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q9xwj\" (UniqueName: \"kubernetes.io/projected/0d905d20-bacc-46e6-83c0-c7e685cf936d-kube-api-access-q9xwj\") pod \"certified-operators-h5ckt\" (UID: \"0d905d20-bacc-46e6-83c0-c7e685cf936d\") " pod="openshift-marketplace/certified-operators-h5ckt"
Sep 30 11:31:41 crc kubenswrapper[4730]: I0930 11:31:41.922292 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0d905d20-bacc-46e6-83c0-c7e685cf936d-utilities\") pod \"certified-operators-h5ckt\" (UID: \"0d905d20-bacc-46e6-83c0-c7e685cf936d\") " pod="openshift-marketplace/certified-operators-h5ckt"
Sep 30 11:31:41 crc kubenswrapper[4730]: I0930 11:31:41.922434 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0d905d20-bacc-46e6-83c0-c7e685cf936d-catalog-content\") pod \"certified-operators-h5ckt\" (UID: \"0d905d20-bacc-46e6-83c0-c7e685cf936d\") " pod="openshift-marketplace/certified-operators-h5ckt"
Sep 30 11:31:41 crc kubenswrapper[4730]: I0930 11:31:41.953419 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q9xwj\" (UniqueName: \"kubernetes.io/projected/0d905d20-bacc-46e6-83c0-c7e685cf936d-kube-api-access-q9xwj\") pod \"certified-operators-h5ckt\" (UID: \"0d905d20-bacc-46e6-83c0-c7e685cf936d\") " pod="openshift-marketplace/certified-operators-h5ckt"
Sep 30 11:31:41 crc kubenswrapper[4730]: I0930 11:31:41.993884 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-h5ckt"
Sep 30 11:31:42 crc kubenswrapper[4730]: I0930 11:31:42.522008 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-h5ckt"]
Sep 30 11:31:43 crc kubenswrapper[4730]: I0930 11:31:43.066594 4730 generic.go:334] "Generic (PLEG): container finished" podID="0d905d20-bacc-46e6-83c0-c7e685cf936d" containerID="15c6752b6f2dab0794594f1a951d69b9b417a3e139e4c51370413a683136e87f" exitCode=0
Sep 30 11:31:43 crc kubenswrapper[4730]: I0930 11:31:43.066697 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-h5ckt" event={"ID":"0d905d20-bacc-46e6-83c0-c7e685cf936d","Type":"ContainerDied","Data":"15c6752b6f2dab0794594f1a951d69b9b417a3e139e4c51370413a683136e87f"}
Sep 30 11:31:43 crc kubenswrapper[4730]: I0930 11:31:43.066876 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-h5ckt" event={"ID":"0d905d20-bacc-46e6-83c0-c7e685cf936d","Type":"ContainerStarted","Data":"f874b80003b302ff38e96febbfbed7b3f590ef6e0c93cbb3e464dbc33a79312c"}
Sep 30 11:31:43 crc kubenswrapper[4730]: I0930 11:31:43.440852 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-8t7p4"]
Sep 30 11:31:43 crc kubenswrapper[4730]: I0930 11:31:43.443654 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-8t7p4"
Sep 30 11:31:43 crc kubenswrapper[4730]: I0930 11:31:43.450430 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-8t7p4"]
Sep 30 11:31:43 crc kubenswrapper[4730]: I0930 11:31:43.558653 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3b99846a-b02f-4eaa-9751-85c0edbdc167-catalog-content\") pod \"redhat-operators-8t7p4\" (UID: \"3b99846a-b02f-4eaa-9751-85c0edbdc167\") " pod="openshift-marketplace/redhat-operators-8t7p4"
Sep 30 11:31:43 crc kubenswrapper[4730]: I0930 11:31:43.558719 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8rtdg\" (UniqueName: \"kubernetes.io/projected/3b99846a-b02f-4eaa-9751-85c0edbdc167-kube-api-access-8rtdg\") pod \"redhat-operators-8t7p4\" (UID: \"3b99846a-b02f-4eaa-9751-85c0edbdc167\") " pod="openshift-marketplace/redhat-operators-8t7p4"
Sep 30 11:31:43 crc kubenswrapper[4730]: I0930 11:31:43.559166 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3b99846a-b02f-4eaa-9751-85c0edbdc167-utilities\") pod \"redhat-operators-8t7p4\" (UID: \"3b99846a-b02f-4eaa-9751-85c0edbdc167\") " pod="openshift-marketplace/redhat-operators-8t7p4"
Sep 30 11:31:43 crc kubenswrapper[4730]: I0930 11:31:43.661349 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3b99846a-b02f-4eaa-9751-85c0edbdc167-utilities\") pod \"redhat-operators-8t7p4\" (UID: \"3b99846a-b02f-4eaa-9751-85c0edbdc167\") " pod="openshift-marketplace/redhat-operators-8t7p4"
Sep 30 11:31:43 crc kubenswrapper[4730]: I0930 11:31:43.661409 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3b99846a-b02f-4eaa-9751-85c0edbdc167-catalog-content\") pod \"redhat-operators-8t7p4\" (UID: \"3b99846a-b02f-4eaa-9751-85c0edbdc167\") " pod="openshift-marketplace/redhat-operators-8t7p4"
Sep 30 11:31:43 crc kubenswrapper[4730]: I0930 11:31:43.661442 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8rtdg\" (UniqueName: \"kubernetes.io/projected/3b99846a-b02f-4eaa-9751-85c0edbdc167-kube-api-access-8rtdg\") pod \"redhat-operators-8t7p4\" (UID: \"3b99846a-b02f-4eaa-9751-85c0edbdc167\") " pod="openshift-marketplace/redhat-operators-8t7p4"
Sep 30 11:31:43 crc kubenswrapper[4730]: I0930 11:31:43.662422 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3b99846a-b02f-4eaa-9751-85c0edbdc167-catalog-content\") pod \"redhat-operators-8t7p4\" (UID: \"3b99846a-b02f-4eaa-9751-85c0edbdc167\") " pod="openshift-marketplace/redhat-operators-8t7p4"
Sep 30 11:31:43 crc kubenswrapper[4730]: I0930 11:31:43.662513 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3b99846a-b02f-4eaa-9751-85c0edbdc167-utilities\") pod \"redhat-operators-8t7p4\" (UID: \"3b99846a-b02f-4eaa-9751-85c0edbdc167\") " pod="openshift-marketplace/redhat-operators-8t7p4"
Sep 30 11:31:43 crc kubenswrapper[4730]: I0930 11:31:43.684091 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8rtdg\" (UniqueName: \"kubernetes.io/projected/3b99846a-b02f-4eaa-9751-85c0edbdc167-kube-api-access-8rtdg\") pod \"redhat-operators-8t7p4\" (UID: \"3b99846a-b02f-4eaa-9751-85c0edbdc167\") " pod="openshift-marketplace/redhat-operators-8t7p4"
Sep 30 11:31:43 crc kubenswrapper[4730]: I0930 11:31:43.771276 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-8t7p4"
Sep 30 11:31:44 crc kubenswrapper[4730]: I0930 11:31:44.080966 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-h5ckt" event={"ID":"0d905d20-bacc-46e6-83c0-c7e685cf936d","Type":"ContainerStarted","Data":"b92675fa843656434df3ad31f5b54b81efcee8d114007da4648ce0e885fde481"}
Sep 30 11:31:44 crc kubenswrapper[4730]: I0930 11:31:44.274915 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-8t7p4"]
Sep 30 11:31:44 crc kubenswrapper[4730]: W0930 11:31:44.275163 4730 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3b99846a_b02f_4eaa_9751_85c0edbdc167.slice/crio-02d98bab66b92e36d12965476a7850fd1ce78882a0d980f80548473a6e19df76 WatchSource:0}: Error finding container 02d98bab66b92e36d12965476a7850fd1ce78882a0d980f80548473a6e19df76: Status 404 returned error can't find the container with id 02d98bab66b92e36d12965476a7850fd1ce78882a0d980f80548473a6e19df76
Sep 30 11:31:45 crc kubenswrapper[4730]: I0930 11:31:45.091622 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8t7p4" event={"ID":"3b99846a-b02f-4eaa-9751-85c0edbdc167","Type":"ContainerStarted","Data":"108644ca8d68f887b4c103a83b6b3f46cdc8fb317eac1bf66c9f575e77a7ee46"}
Sep 30 11:31:45 crc kubenswrapper[4730]: I0930 11:31:45.092110 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8t7p4" event={"ID":"3b99846a-b02f-4eaa-9751-85c0edbdc167","Type":"ContainerStarted","Data":"02d98bab66b92e36d12965476a7850fd1ce78882a0d980f80548473a6e19df76"}
Sep 30 11:31:45 crc kubenswrapper[4730]: I0930 11:31:45.094778 4730 generic.go:334] "Generic (PLEG): container finished" podID="0d905d20-bacc-46e6-83c0-c7e685cf936d" containerID="b92675fa843656434df3ad31f5b54b81efcee8d114007da4648ce0e885fde481" exitCode=0
Sep 30 11:31:45 crc kubenswrapper[4730]: I0930 11:31:45.094819 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-h5ckt" event={"ID":"0d905d20-bacc-46e6-83c0-c7e685cf936d","Type":"ContainerDied","Data":"b92675fa843656434df3ad31f5b54b81efcee8d114007da4648ce0e885fde481"}
Sep 30 11:31:46 crc kubenswrapper[4730]: I0930 11:31:46.108493 4730 generic.go:334] "Generic (PLEG): container finished" podID="3b99846a-b02f-4eaa-9751-85c0edbdc167" containerID="108644ca8d68f887b4c103a83b6b3f46cdc8fb317eac1bf66c9f575e77a7ee46" exitCode=0
Sep 30 11:31:46 crc kubenswrapper[4730]: I0930 11:31:46.108575 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8t7p4" event={"ID":"3b99846a-b02f-4eaa-9751-85c0edbdc167","Type":"ContainerDied","Data":"108644ca8d68f887b4c103a83b6b3f46cdc8fb317eac1bf66c9f575e77a7ee46"}
Sep 30 11:31:46 crc kubenswrapper[4730]: I0930 11:31:46.114289 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-h5ckt" event={"ID":"0d905d20-bacc-46e6-83c0-c7e685cf936d","Type":"ContainerStarted","Data":"8f03c67b178e4e92fcd43f267119329af6402da8b53dc2dfeb5b7891a2fa3a3d"}
Sep 30 11:31:46 crc kubenswrapper[4730]: I0930 11:31:46.152412 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-h5ckt" podStartSLOduration=2.648806424 podStartE2EDuration="5.152395745s" podCreationTimestamp="2025-09-30 11:31:41 +0000 UTC" firstStartedPulling="2025-09-30 11:31:43.06877486 +0000 UTC m=+6147.402034853" lastFinishedPulling="2025-09-30 11:31:45.572364181 +0000 UTC m=+6149.905624174" observedRunningTime="2025-09-30 11:31:46.151366168 +0000 UTC m=+6150.484626161" watchObservedRunningTime="2025-09-30 11:31:46.152395745 +0000 UTC m=+6150.485655738"
Sep 30 11:31:47 crc kubenswrapper[4730]: I0930 11:31:47.125161 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8t7p4" event={"ID":"3b99846a-b02f-4eaa-9751-85c0edbdc167","Type":"ContainerStarted","Data":"4b70d0111659c67c5d394be94cc8b0940e230639110744ac13ba769198ffcd71"}
Sep 30 11:31:51 crc kubenswrapper[4730]: I0930 11:31:51.996438 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-h5ckt"
Sep 30 11:31:51 crc kubenswrapper[4730]: I0930 11:31:51.996916 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-h5ckt"
Sep 30 11:31:52 crc kubenswrapper[4730]: I0930 11:31:52.167603 4730 generic.go:334] "Generic (PLEG): container finished" podID="3b99846a-b02f-4eaa-9751-85c0edbdc167" containerID="4b70d0111659c67c5d394be94cc8b0940e230639110744ac13ba769198ffcd71" exitCode=0
Sep 30 11:31:52 crc kubenswrapper[4730]: I0930 11:31:52.167646 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8t7p4" event={"ID":"3b99846a-b02f-4eaa-9751-85c0edbdc167","Type":"ContainerDied","Data":"4b70d0111659c67c5d394be94cc8b0940e230639110744ac13ba769198ffcd71"}
Sep 30 11:31:53 crc kubenswrapper[4730]: I0930 11:31:53.050037 4730 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-h5ckt" podUID="0d905d20-bacc-46e6-83c0-c7e685cf936d" containerName="registry-server" probeResult="failure" output=<
Sep 30 11:31:53 crc kubenswrapper[4730]: timeout: failed to connect service ":50051" within 1s
Sep 30 11:31:53 crc kubenswrapper[4730]: >
Sep 30 11:31:53 crc kubenswrapper[4730]: I0930 11:31:53.181856 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8t7p4" event={"ID":"3b99846a-b02f-4eaa-9751-85c0edbdc167","Type":"ContainerStarted","Data":"7a9490b8eb6873644faf47f8595c0e24801d64dd1b436dcf27094c913519e4ae"}
Sep 30 11:31:53 crc kubenswrapper[4730]: I0930 11:31:53.228068 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-8t7p4" podStartSLOduration=3.672552805 podStartE2EDuration="10.228048066s" podCreationTimestamp="2025-09-30 11:31:43 +0000 UTC" firstStartedPulling="2025-09-30 11:31:46.110488303 +0000 UTC m=+6150.443748316" lastFinishedPulling="2025-09-30 11:31:52.665983584 +0000 UTC m=+6156.999243577" observedRunningTime="2025-09-30 11:31:53.216984435 +0000 UTC m=+6157.550244428" watchObservedRunningTime="2025-09-30 11:31:53.228048066 +0000 UTC m=+6157.561308059"
Sep 30 11:31:53 crc kubenswrapper[4730]: I0930 11:31:53.771918 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-8t7p4"
Sep 30 11:31:53 crc kubenswrapper[4730]: I0930 11:31:53.772111 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-8t7p4"
Sep 30 11:31:54 crc kubenswrapper[4730]: I0930 11:31:54.832766 4730 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-8t7p4" podUID="3b99846a-b02f-4eaa-9751-85c0edbdc167" containerName="registry-server" probeResult="failure" output=<
Sep 30 11:31:54 crc kubenswrapper[4730]: timeout: failed to connect service ":50051" within 1s
Sep 30 11:31:54 crc kubenswrapper[4730]: >
Sep 30 11:32:02 crc kubenswrapper[4730]: I0930 11:32:02.054475 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-h5ckt"
Sep 30 11:32:02 crc kubenswrapper[4730]: I0930 11:32:02.115026 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-h5ckt"
Sep 30 11:32:02 crc kubenswrapper[4730]: I0930 11:32:02.304240 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-h5ckt"]
Sep 30 11:32:03 crc kubenswrapper[4730]: I0930 11:32:03.279820 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-h5ckt" podUID="0d905d20-bacc-46e6-83c0-c7e685cf936d" containerName="registry-server" containerID="cri-o://8f03c67b178e4e92fcd43f267119329af6402da8b53dc2dfeb5b7891a2fa3a3d" gracePeriod=2
Sep 30 11:32:03 crc kubenswrapper[4730]: I0930 11:32:03.807889 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-h5ckt"
Sep 30 11:32:03 crc kubenswrapper[4730]: I0930 11:32:03.982730 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0d905d20-bacc-46e6-83c0-c7e685cf936d-utilities\") pod \"0d905d20-bacc-46e6-83c0-c7e685cf936d\" (UID: \"0d905d20-bacc-46e6-83c0-c7e685cf936d\") "
Sep 30 11:32:03 crc kubenswrapper[4730]: I0930 11:32:03.982852 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q9xwj\" (UniqueName: \"kubernetes.io/projected/0d905d20-bacc-46e6-83c0-c7e685cf936d-kube-api-access-q9xwj\") pod \"0d905d20-bacc-46e6-83c0-c7e685cf936d\" (UID: \"0d905d20-bacc-46e6-83c0-c7e685cf936d\") "
Sep 30 11:32:03 crc kubenswrapper[4730]: I0930 11:32:03.982880 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0d905d20-bacc-46e6-83c0-c7e685cf936d-catalog-content\") pod \"0d905d20-bacc-46e6-83c0-c7e685cf936d\" (UID: \"0d905d20-bacc-46e6-83c0-c7e685cf936d\") "
Sep 30 11:32:03 crc kubenswrapper[4730]: I0930 11:32:03.983919 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0d905d20-bacc-46e6-83c0-c7e685cf936d-utilities" (OuterVolumeSpecName: "utilities") pod "0d905d20-bacc-46e6-83c0-c7e685cf936d" (UID: "0d905d20-bacc-46e6-83c0-c7e685cf936d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 11:32:03 crc kubenswrapper[4730]: I0930 11:32:03.989224 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0d905d20-bacc-46e6-83c0-c7e685cf936d-kube-api-access-q9xwj" (OuterVolumeSpecName: "kube-api-access-q9xwj") pod "0d905d20-bacc-46e6-83c0-c7e685cf936d" (UID: "0d905d20-bacc-46e6-83c0-c7e685cf936d"). InnerVolumeSpecName "kube-api-access-q9xwj". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 11:32:04 crc kubenswrapper[4730]: I0930 11:32:04.025842 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0d905d20-bacc-46e6-83c0-c7e685cf936d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0d905d20-bacc-46e6-83c0-c7e685cf936d" (UID: "0d905d20-bacc-46e6-83c0-c7e685cf936d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 11:32:04 crc kubenswrapper[4730]: I0930 11:32:04.086363 4730 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0d905d20-bacc-46e6-83c0-c7e685cf936d-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 11:32:04 crc kubenswrapper[4730]: I0930 11:32:04.086420 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q9xwj\" (UniqueName: \"kubernetes.io/projected/0d905d20-bacc-46e6-83c0-c7e685cf936d-kube-api-access-q9xwj\") on node \"crc\" DevicePath \"\"" Sep 30 11:32:04 crc kubenswrapper[4730]: I0930 11:32:04.086438 4730 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0d905d20-bacc-46e6-83c0-c7e685cf936d-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 11:32:04 crc kubenswrapper[4730]: I0930 11:32:04.294432 4730 generic.go:334] "Generic (PLEG): container finished" podID="0d905d20-bacc-46e6-83c0-c7e685cf936d" containerID="8f03c67b178e4e92fcd43f267119329af6402da8b53dc2dfeb5b7891a2fa3a3d" exitCode=0 Sep 30 11:32:04 crc kubenswrapper[4730]: I0930 11:32:04.294503 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-h5ckt" event={"ID":"0d905d20-bacc-46e6-83c0-c7e685cf936d","Type":"ContainerDied","Data":"8f03c67b178e4e92fcd43f267119329af6402da8b53dc2dfeb5b7891a2fa3a3d"} Sep 30 11:32:04 crc kubenswrapper[4730]: I0930 11:32:04.294515 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-h5ckt" Sep 30 11:32:04 crc kubenswrapper[4730]: I0930 11:32:04.294546 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-h5ckt" event={"ID":"0d905d20-bacc-46e6-83c0-c7e685cf936d","Type":"ContainerDied","Data":"f874b80003b302ff38e96febbfbed7b3f590ef6e0c93cbb3e464dbc33a79312c"} Sep 30 11:32:04 crc kubenswrapper[4730]: I0930 11:32:04.294589 4730 scope.go:117] "RemoveContainer" containerID="8f03c67b178e4e92fcd43f267119329af6402da8b53dc2dfeb5b7891a2fa3a3d" Sep 30 11:32:04 crc kubenswrapper[4730]: I0930 11:32:04.317993 4730 scope.go:117] "RemoveContainer" containerID="b92675fa843656434df3ad31f5b54b81efcee8d114007da4648ce0e885fde481" Sep 30 11:32:04 crc kubenswrapper[4730]: I0930 11:32:04.335691 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-h5ckt"] Sep 30 11:32:04 crc kubenswrapper[4730]: I0930 11:32:04.344147 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-h5ckt"] Sep 30 11:32:04 crc kubenswrapper[4730]: I0930 11:32:04.388899 4730 scope.go:117] "RemoveContainer" containerID="15c6752b6f2dab0794594f1a951d69b9b417a3e139e4c51370413a683136e87f" Sep 30 11:32:04 crc kubenswrapper[4730]: I0930 11:32:04.398936 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0d905d20-bacc-46e6-83c0-c7e685cf936d" path="/var/lib/kubelet/pods/0d905d20-bacc-46e6-83c0-c7e685cf936d/volumes" Sep 30 11:32:04 crc kubenswrapper[4730]: I0930 11:32:04.416216 4730 scope.go:117] "RemoveContainer" containerID="8f03c67b178e4e92fcd43f267119329af6402da8b53dc2dfeb5b7891a2fa3a3d" Sep 30 11:32:04 crc kubenswrapper[4730]: E0930 11:32:04.416652 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8f03c67b178e4e92fcd43f267119329af6402da8b53dc2dfeb5b7891a2fa3a3d\": container with ID starting with 8f03c67b178e4e92fcd43f267119329af6402da8b53dc2dfeb5b7891a2fa3a3d not found: ID does not exist" containerID="8f03c67b178e4e92fcd43f267119329af6402da8b53dc2dfeb5b7891a2fa3a3d" Sep 30 11:32:04 crc kubenswrapper[4730]: I0930 11:32:04.416715 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8f03c67b178e4e92fcd43f267119329af6402da8b53dc2dfeb5b7891a2fa3a3d"} err="failed to get container status \"8f03c67b178e4e92fcd43f267119329af6402da8b53dc2dfeb5b7891a2fa3a3d\": rpc error: code = NotFound desc = could not find container \"8f03c67b178e4e92fcd43f267119329af6402da8b53dc2dfeb5b7891a2fa3a3d\": container with ID starting with 8f03c67b178e4e92fcd43f267119329af6402da8b53dc2dfeb5b7891a2fa3a3d not found: ID does not exist" Sep 30 11:32:04 crc kubenswrapper[4730]: I0930 11:32:04.416744 4730 scope.go:117] "RemoveContainer" containerID="b92675fa843656434df3ad31f5b54b81efcee8d114007da4648ce0e885fde481" Sep 30 11:32:04 crc kubenswrapper[4730]: E0930 11:32:04.417267 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b92675fa843656434df3ad31f5b54b81efcee8d114007da4648ce0e885fde481\": container with ID starting with b92675fa843656434df3ad31f5b54b81efcee8d114007da4648ce0e885fde481 not found: ID does not exist" containerID="b92675fa843656434df3ad31f5b54b81efcee8d114007da4648ce0e885fde481" Sep 30 11:32:04 crc kubenswrapper[4730]: I0930 11:32:04.417299 4730 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"b92675fa843656434df3ad31f5b54b81efcee8d114007da4648ce0e885fde481"} err="failed to get container status \"b92675fa843656434df3ad31f5b54b81efcee8d114007da4648ce0e885fde481\": rpc error: code = NotFound desc = could not find container \"b92675fa843656434df3ad31f5b54b81efcee8d114007da4648ce0e885fde481\": container with ID starting with b92675fa843656434df3ad31f5b54b81efcee8d114007da4648ce0e885fde481 not found: ID does not exist" Sep 30 11:32:04 crc kubenswrapper[4730]: I0930 11:32:04.417316 4730 scope.go:117] "RemoveContainer" containerID="15c6752b6f2dab0794594f1a951d69b9b417a3e139e4c51370413a683136e87f" Sep 30 11:32:04 crc kubenswrapper[4730]: E0930 11:32:04.417532 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"15c6752b6f2dab0794594f1a951d69b9b417a3e139e4c51370413a683136e87f\": container with ID starting with 15c6752b6f2dab0794594f1a951d69b9b417a3e139e4c51370413a683136e87f not found: ID does not exist" containerID="15c6752b6f2dab0794594f1a951d69b9b417a3e139e4c51370413a683136e87f" Sep 30 11:32:04 crc kubenswrapper[4730]: I0930 11:32:04.417553 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"15c6752b6f2dab0794594f1a951d69b9b417a3e139e4c51370413a683136e87f"} err="failed to get container status \"15c6752b6f2dab0794594f1a951d69b9b417a3e139e4c51370413a683136e87f\": rpc error: code = NotFound desc = could not find container \"15c6752b6f2dab0794594f1a951d69b9b417a3e139e4c51370413a683136e87f\": container with ID starting with 15c6752b6f2dab0794594f1a951d69b9b417a3e139e4c51370413a683136e87f not found: ID does not exist" Sep 30 11:32:04 crc kubenswrapper[4730]: I0930 11:32:04.820038 4730 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-8t7p4" podUID="3b99846a-b02f-4eaa-9751-85c0edbdc167" containerName="registry-server" probeResult="failure" output=< Sep 30 11:32:04 crc kubenswrapper[4730]: timeout: failed to connect service ":50051" within 1s Sep 30 11:32:04 crc kubenswrapper[4730]: > Sep 30 11:32:13 crc kubenswrapper[4730]: I0930 11:32:13.856733 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-8t7p4" Sep 30 11:32:13 crc kubenswrapper[4730]: I0930 11:32:13.918755 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-8t7p4" Sep 30 11:32:14 crc kubenswrapper[4730]: I0930 11:32:14.652563 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-8t7p4"] Sep 30 11:32:15 crc kubenswrapper[4730]: I0930 11:32:15.413532 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-8t7p4" podUID="3b99846a-b02f-4eaa-9751-85c0edbdc167" containerName="registry-server" containerID="cri-o://7a9490b8eb6873644faf47f8595c0e24801d64dd1b436dcf27094c913519e4ae" gracePeriod=2 Sep 30 11:32:15 crc kubenswrapper[4730]: I0930 11:32:15.900315 4730 util.go:48] "No ready sandbox for pod can be found. 
Sep 30 11:32:16 crc kubenswrapper[4730]: I0930 11:32:16.063299 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3b99846a-b02f-4eaa-9751-85c0edbdc167-utilities\") pod \"3b99846a-b02f-4eaa-9751-85c0edbdc167\" (UID: \"3b99846a-b02f-4eaa-9751-85c0edbdc167\") "
Sep 30 11:32:16 crc kubenswrapper[4730]: I0930 11:32:16.063396 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8rtdg\" (UniqueName: \"kubernetes.io/projected/3b99846a-b02f-4eaa-9751-85c0edbdc167-kube-api-access-8rtdg\") pod \"3b99846a-b02f-4eaa-9751-85c0edbdc167\" (UID: \"3b99846a-b02f-4eaa-9751-85c0edbdc167\") "
Sep 30 11:32:16 crc kubenswrapper[4730]: I0930 11:32:16.063673 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3b99846a-b02f-4eaa-9751-85c0edbdc167-catalog-content\") pod \"3b99846a-b02f-4eaa-9751-85c0edbdc167\" (UID: \"3b99846a-b02f-4eaa-9751-85c0edbdc167\") "
Sep 30 11:32:16 crc kubenswrapper[4730]: I0930 11:32:16.064654 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3b99846a-b02f-4eaa-9751-85c0edbdc167-utilities" (OuterVolumeSpecName: "utilities") pod "3b99846a-b02f-4eaa-9751-85c0edbdc167" (UID: "3b99846a-b02f-4eaa-9751-85c0edbdc167"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 11:32:16 crc kubenswrapper[4730]: I0930 11:32:16.071738 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3b99846a-b02f-4eaa-9751-85c0edbdc167-kube-api-access-8rtdg" (OuterVolumeSpecName: "kube-api-access-8rtdg") pod "3b99846a-b02f-4eaa-9751-85c0edbdc167" (UID: "3b99846a-b02f-4eaa-9751-85c0edbdc167"). InnerVolumeSpecName "kube-api-access-8rtdg". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 11:32:16 crc kubenswrapper[4730]: I0930 11:32:16.155791 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3b99846a-b02f-4eaa-9751-85c0edbdc167-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3b99846a-b02f-4eaa-9751-85c0edbdc167" (UID: "3b99846a-b02f-4eaa-9751-85c0edbdc167"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 11:32:16 crc kubenswrapper[4730]: I0930 11:32:16.166061 4730 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3b99846a-b02f-4eaa-9751-85c0edbdc167-catalog-content\") on node \"crc\" DevicePath \"\""
Sep 30 11:32:16 crc kubenswrapper[4730]: I0930 11:32:16.166088 4730 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3b99846a-b02f-4eaa-9751-85c0edbdc167-utilities\") on node \"crc\" DevicePath \"\""
Sep 30 11:32:16 crc kubenswrapper[4730]: I0930 11:32:16.166097 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8rtdg\" (UniqueName: \"kubernetes.io/projected/3b99846a-b02f-4eaa-9751-85c0edbdc167-kube-api-access-8rtdg\") on node \"crc\" DevicePath \"\""
Sep 30 11:32:16 crc kubenswrapper[4730]: I0930 11:32:16.426888 4730 generic.go:334] "Generic (PLEG): container finished" podID="3b99846a-b02f-4eaa-9751-85c0edbdc167" containerID="7a9490b8eb6873644faf47f8595c0e24801d64dd1b436dcf27094c913519e4ae" exitCode=0
Sep 30 11:32:16 crc kubenswrapper[4730]: I0930 11:32:16.426985 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-8t7p4"
Sep 30 11:32:16 crc kubenswrapper[4730]: I0930 11:32:16.426993 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8t7p4" event={"ID":"3b99846a-b02f-4eaa-9751-85c0edbdc167","Type":"ContainerDied","Data":"7a9490b8eb6873644faf47f8595c0e24801d64dd1b436dcf27094c913519e4ae"}
Sep 30 11:32:16 crc kubenswrapper[4730]: I0930 11:32:16.427059 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8t7p4" event={"ID":"3b99846a-b02f-4eaa-9751-85c0edbdc167","Type":"ContainerDied","Data":"02d98bab66b92e36d12965476a7850fd1ce78882a0d980f80548473a6e19df76"}
Sep 30 11:32:16 crc kubenswrapper[4730]: I0930 11:32:16.427083 4730 scope.go:117] "RemoveContainer" containerID="7a9490b8eb6873644faf47f8595c0e24801d64dd1b436dcf27094c913519e4ae"
Sep 30 11:32:16 crc kubenswrapper[4730]: I0930 11:32:16.454361 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-8t7p4"]
Sep 30 11:32:16 crc kubenswrapper[4730]: I0930 11:32:16.460491 4730 scope.go:117] "RemoveContainer" containerID="4b70d0111659c67c5d394be94cc8b0940e230639110744ac13ba769198ffcd71"
Sep 30 11:32:16 crc kubenswrapper[4730]: I0930 11:32:16.464083 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-8t7p4"]
Sep 30 11:32:16 crc kubenswrapper[4730]: I0930 11:32:16.481251 4730 scope.go:117] "RemoveContainer" containerID="108644ca8d68f887b4c103a83b6b3f46cdc8fb317eac1bf66c9f575e77a7ee46"
Sep 30 11:32:16 crc kubenswrapper[4730]: I0930 11:32:16.529112 4730 scope.go:117] "RemoveContainer" containerID="7a9490b8eb6873644faf47f8595c0e24801d64dd1b436dcf27094c913519e4ae"
Sep 30 11:32:16 crc kubenswrapper[4730]: E0930 11:32:16.529591 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7a9490b8eb6873644faf47f8595c0e24801d64dd1b436dcf27094c913519e4ae\": container with ID starting with 7a9490b8eb6873644faf47f8595c0e24801d64dd1b436dcf27094c913519e4ae not found: ID does not exist" containerID="7a9490b8eb6873644faf47f8595c0e24801d64dd1b436dcf27094c913519e4ae"
Sep 30 11:32:16 crc kubenswrapper[4730]: I0930 11:32:16.529646 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7a9490b8eb6873644faf47f8595c0e24801d64dd1b436dcf27094c913519e4ae"} err="failed to get container status \"7a9490b8eb6873644faf47f8595c0e24801d64dd1b436dcf27094c913519e4ae\": rpc error: code = NotFound desc = could not find container \"7a9490b8eb6873644faf47f8595c0e24801d64dd1b436dcf27094c913519e4ae\": container with ID starting with 7a9490b8eb6873644faf47f8595c0e24801d64dd1b436dcf27094c913519e4ae not found: ID does not exist"
Sep 30 11:32:16 crc kubenswrapper[4730]: I0930 11:32:16.529672 4730 scope.go:117] "RemoveContainer" containerID="4b70d0111659c67c5d394be94cc8b0940e230639110744ac13ba769198ffcd71"
Sep 30 11:32:16 crc kubenswrapper[4730]: E0930 11:32:16.529925 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4b70d0111659c67c5d394be94cc8b0940e230639110744ac13ba769198ffcd71\": container with ID starting with 4b70d0111659c67c5d394be94cc8b0940e230639110744ac13ba769198ffcd71 not found: ID does not exist" containerID="4b70d0111659c67c5d394be94cc8b0940e230639110744ac13ba769198ffcd71"
Sep 30 11:32:16 crc kubenswrapper[4730]: I0930 11:32:16.529953 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4b70d0111659c67c5d394be94cc8b0940e230639110744ac13ba769198ffcd71"} err="failed to get container status \"4b70d0111659c67c5d394be94cc8b0940e230639110744ac13ba769198ffcd71\": rpc error: code = NotFound desc = could not find container \"4b70d0111659c67c5d394be94cc8b0940e230639110744ac13ba769198ffcd71\": container with ID starting with 4b70d0111659c67c5d394be94cc8b0940e230639110744ac13ba769198ffcd71 not found: ID does not exist"
Sep 30 11:32:16 crc kubenswrapper[4730]: I0930 11:32:16.529971 4730 scope.go:117] "RemoveContainer" containerID="108644ca8d68f887b4c103a83b6b3f46cdc8fb317eac1bf66c9f575e77a7ee46"
Sep 30 11:32:16 crc kubenswrapper[4730]: E0930 11:32:16.530205 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"108644ca8d68f887b4c103a83b6b3f46cdc8fb317eac1bf66c9f575e77a7ee46\": container with ID starting with 108644ca8d68f887b4c103a83b6b3f46cdc8fb317eac1bf66c9f575e77a7ee46 not found: ID does not exist" containerID="108644ca8d68f887b4c103a83b6b3f46cdc8fb317eac1bf66c9f575e77a7ee46"
Sep 30 11:32:16 crc kubenswrapper[4730]: I0930 11:32:16.530227 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"108644ca8d68f887b4c103a83b6b3f46cdc8fb317eac1bf66c9f575e77a7ee46"} err="failed to get container status \"108644ca8d68f887b4c103a83b6b3f46cdc8fb317eac1bf66c9f575e77a7ee46\": rpc error: code = NotFound desc = could not find container \"108644ca8d68f887b4c103a83b6b3f46cdc8fb317eac1bf66c9f575e77a7ee46\": container with ID starting with 108644ca8d68f887b4c103a83b6b3f46cdc8fb317eac1bf66c9f575e77a7ee46 not found: ID does not exist"
Sep 30 11:32:18 crc kubenswrapper[4730]: I0930 11:32:18.394905 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3b99846a-b02f-4eaa-9751-85c0edbdc167" path="/var/lib/kubelet/pods/3b99846a-b02f-4eaa-9751-85c0edbdc167/volumes"
Sep 30 11:32:54 crc kubenswrapper[4730]: I0930 11:32:54.807764 4730 generic.go:334] "Generic (PLEG): container finished" podID="5d6d300c-5857-4de1-8317-cded656bc61e" containerID="daab50ecd778c1b470cb438a599cf601b52b412c0492d9e4766f8c4faed80864" exitCode=0
Sep 30 11:32:54 crc kubenswrapper[4730]: I0930 11:32:54.807873 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"5d6d300c-5857-4de1-8317-cded656bc61e","Type":"ContainerDied","Data":"daab50ecd778c1b470cb438a599cf601b52b412c0492d9e4766f8c4faed80864"}
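Each teardown episode in this log follows the same fixed order: UnmountVolume.TearDown per volume, then a "Volume detached" record for each, and only afterwards "Cleaned up orphaned pod volumes dir". A toy Go sketch of a reconcile step enforcing that ordering (function and variable names are illustrative, not kubelet source):

package main

import "fmt"

// reconcileTeardown tears down each mounted volume, reports it detached,
// and only signals that the pod dir may be cleaned when nothing remains.
func reconcileTeardown(mounted []string, tearDown func(string) error) bool {
	var remaining []string
	for _, v := range mounted {
		if err := tearDown(v); err != nil {
			fmt.Println("UnmountVolume failed, will retry:", v, err)
			remaining = append(remaining, v)
			continue
		}
		fmt.Println("Volume detached:", v)
	}
	return len(remaining) == 0 // gate for "Cleaned up orphaned pod volumes dir"
}

func main() {
	vols := []string{"utilities", "catalog-content", "kube-api-access"}
	if reconcileTeardown(vols, func(string) error { return nil }) {
		fmt.Println("Cleaned up orphaned pod volumes dir")
	}
}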
Sep 30 11:32:56 crc kubenswrapper[4730]: I0930 11:32:56.289049 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest"
Sep 30 11:32:56 crc kubenswrapper[4730]: I0930 11:32:56.444259 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/5d6d300c-5857-4de1-8317-cded656bc61e-test-operator-ephemeral-temporary\") pod \"5d6d300c-5857-4de1-8317-cded656bc61e\" (UID: \"5d6d300c-5857-4de1-8317-cded656bc61e\") "
Sep 30 11:32:56 crc kubenswrapper[4730]: I0930 11:32:56.444365 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dzs5v\" (UniqueName: \"kubernetes.io/projected/5d6d300c-5857-4de1-8317-cded656bc61e-kube-api-access-dzs5v\") pod \"5d6d300c-5857-4de1-8317-cded656bc61e\" (UID: \"5d6d300c-5857-4de1-8317-cded656bc61e\") "
Sep 30 11:32:56 crc kubenswrapper[4730]: I0930 11:32:56.444417 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/5d6d300c-5857-4de1-8317-cded656bc61e-test-operator-ephemeral-workdir\") pod \"5d6d300c-5857-4de1-8317-cded656bc61e\" (UID: \"5d6d300c-5857-4de1-8317-cded656bc61e\") "
Sep 30 11:32:56 crc kubenswrapper[4730]: I0930 11:32:56.444637 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/5d6d300c-5857-4de1-8317-cded656bc61e-openstack-config-secret\") pod \"5d6d300c-5857-4de1-8317-cded656bc61e\" (UID: \"5d6d300c-5857-4de1-8317-cded656bc61e\") "
Sep 30 11:32:56 crc kubenswrapper[4730]: I0930 11:32:56.444748 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5d6d300c-5857-4de1-8317-cded656bc61e-config-data\") pod \"5d6d300c-5857-4de1-8317-cded656bc61e\" (UID: \"5d6d300c-5857-4de1-8317-cded656bc61e\") "
Sep 30 11:32:56 crc kubenswrapper[4730]: I0930 11:32:56.444836 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5d6d300c-5857-4de1-8317-cded656bc61e-ssh-key\") pod \"5d6d300c-5857-4de1-8317-cded656bc61e\" (UID: \"5d6d300c-5857-4de1-8317-cded656bc61e\") "
Sep 30 11:32:56 crc kubenswrapper[4730]: I0930 11:32:56.444860 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-logs\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"5d6d300c-5857-4de1-8317-cded656bc61e\" (UID: \"5d6d300c-5857-4de1-8317-cded656bc61e\") "
Sep 30 11:32:56 crc kubenswrapper[4730]: I0930 11:32:56.444904 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/5d6d300c-5857-4de1-8317-cded656bc61e-ca-certs\") pod \"5d6d300c-5857-4de1-8317-cded656bc61e\" (UID: \"5d6d300c-5857-4de1-8317-cded656bc61e\") "
Sep 30 11:32:56 crc kubenswrapper[4730]: I0930 11:32:56.445026 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5d6d300c-5857-4de1-8317-cded656bc61e-test-operator-ephemeral-temporary" (OuterVolumeSpecName: "test-operator-ephemeral-temporary") pod "5d6d300c-5857-4de1-8317-cded656bc61e" (UID: "5d6d300c-5857-4de1-8317-cded656bc61e"). InnerVolumeSpecName "test-operator-ephemeral-temporary". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 11:32:56 crc kubenswrapper[4730]: I0930 11:32:56.445047 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/5d6d300c-5857-4de1-8317-cded656bc61e-openstack-config\") pod \"5d6d300c-5857-4de1-8317-cded656bc61e\" (UID: \"5d6d300c-5857-4de1-8317-cded656bc61e\") "
Sep 30 11:32:56 crc kubenswrapper[4730]: I0930 11:32:56.446038 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5d6d300c-5857-4de1-8317-cded656bc61e-config-data" (OuterVolumeSpecName: "config-data") pod "5d6d300c-5857-4de1-8317-cded656bc61e" (UID: "5d6d300c-5857-4de1-8317-cded656bc61e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 11:32:56 crc kubenswrapper[4730]: I0930 11:32:56.447116 4730 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5d6d300c-5857-4de1-8317-cded656bc61e-config-data\") on node \"crc\" DevicePath \"\""
Sep 30 11:32:56 crc kubenswrapper[4730]: I0930 11:32:56.447364 4730 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/5d6d300c-5857-4de1-8317-cded656bc61e-test-operator-ephemeral-temporary\") on node \"crc\" DevicePath \"\""
Sep 30 11:32:56 crc kubenswrapper[4730]: I0930 11:32:56.453836 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage06-crc" (OuterVolumeSpecName: "test-operator-logs") pod "5d6d300c-5857-4de1-8317-cded656bc61e" (UID: "5d6d300c-5857-4de1-8317-cded656bc61e"). InnerVolumeSpecName "local-storage06-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue ""
Sep 30 11:32:56 crc kubenswrapper[4730]: I0930 11:32:56.453981 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5d6d300c-5857-4de1-8317-cded656bc61e-kube-api-access-dzs5v" (OuterVolumeSpecName: "kube-api-access-dzs5v") pod "5d6d300c-5857-4de1-8317-cded656bc61e" (UID: "5d6d300c-5857-4de1-8317-cded656bc61e"). InnerVolumeSpecName "kube-api-access-dzs5v". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 11:32:56 crc kubenswrapper[4730]: I0930 11:32:56.457929 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5d6d300c-5857-4de1-8317-cded656bc61e-test-operator-ephemeral-workdir" (OuterVolumeSpecName: "test-operator-ephemeral-workdir") pod "5d6d300c-5857-4de1-8317-cded656bc61e" (UID: "5d6d300c-5857-4de1-8317-cded656bc61e"). InnerVolumeSpecName "test-operator-ephemeral-workdir". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 11:32:56 crc kubenswrapper[4730]: I0930 11:32:56.476225 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5d6d300c-5857-4de1-8317-cded656bc61e-ca-certs" (OuterVolumeSpecName: "ca-certs") pod "5d6d300c-5857-4de1-8317-cded656bc61e" (UID: "5d6d300c-5857-4de1-8317-cded656bc61e"). InnerVolumeSpecName "ca-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 11:32:56 crc kubenswrapper[4730]: I0930 11:32:56.476843 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5d6d300c-5857-4de1-8317-cded656bc61e-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "5d6d300c-5857-4de1-8317-cded656bc61e" (UID: "5d6d300c-5857-4de1-8317-cded656bc61e"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 11:32:56 crc kubenswrapper[4730]: I0930 11:32:56.482487 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5d6d300c-5857-4de1-8317-cded656bc61e-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "5d6d300c-5857-4de1-8317-cded656bc61e" (UID: "5d6d300c-5857-4de1-8317-cded656bc61e"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 11:32:56 crc kubenswrapper[4730]: I0930 11:32:56.500957 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5d6d300c-5857-4de1-8317-cded656bc61e-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "5d6d300c-5857-4de1-8317-cded656bc61e" (UID: "5d6d300c-5857-4de1-8317-cded656bc61e"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 11:32:56 crc kubenswrapper[4730]: I0930 11:32:56.548938 4730 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/5d6d300c-5857-4de1-8317-cded656bc61e-openstack-config\") on node \"crc\" DevicePath \"\""
Sep 30 11:32:56 crc kubenswrapper[4730]: I0930 11:32:56.549257 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dzs5v\" (UniqueName: \"kubernetes.io/projected/5d6d300c-5857-4de1-8317-cded656bc61e-kube-api-access-dzs5v\") on node \"crc\" DevicePath \"\""
Sep 30 11:32:56 crc kubenswrapper[4730]: I0930 11:32:56.549342 4730 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/5d6d300c-5857-4de1-8317-cded656bc61e-test-operator-ephemeral-workdir\") on node \"crc\" DevicePath \"\""
Sep 30 11:32:56 crc kubenswrapper[4730]: I0930 11:32:56.549491 4730 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/5d6d300c-5857-4de1-8317-cded656bc61e-openstack-config-secret\") on node \"crc\" DevicePath \"\""
Sep 30 11:32:56 crc kubenswrapper[4730]: I0930 11:32:56.549588 4730 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5d6d300c-5857-4de1-8317-cded656bc61e-ssh-key\") on node \"crc\" DevicePath \"\""
Sep 30 11:32:56 crc kubenswrapper[4730]: I0930 11:32:56.549685 4730 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" "
Sep 30 11:32:56 crc kubenswrapper[4730]: I0930 11:32:56.549754 4730 reconciler_common.go:293] "Volume detached for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/5d6d300c-5857-4de1-8317-cded656bc61e-ca-certs\") on node \"crc\" DevicePath \"\""
Sep 30 11:32:56 crc kubenswrapper[4730]: I0930 11:32:56.569897 4730 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage06-crc" (UniqueName: "kubernetes.io/local-volume/local-storage06-crc") on node "crc"
Sep 30 11:32:56 crc kubenswrapper[4730]: I0930
11:32:56.651761 4730 reconciler_common.go:293] "Volume detached for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" DevicePath \"\"" Sep 30 11:32:56 crc kubenswrapper[4730]: I0930 11:32:56.832603 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"5d6d300c-5857-4de1-8317-cded656bc61e","Type":"ContainerDied","Data":"60d2dd3529780567a657518727a20d1b4b6aca8b6472a77440f82f1077c8325f"} Sep 30 11:32:56 crc kubenswrapper[4730]: I0930 11:32:56.832694 4730 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="60d2dd3529780567a657518727a20d1b4b6aca8b6472a77440f82f1077c8325f" Sep 30 11:32:56 crc kubenswrapper[4730]: I0930 11:32:56.832990 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Sep 30 11:33:07 crc kubenswrapper[4730]: I0930 11:33:07.363544 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Sep 30 11:33:07 crc kubenswrapper[4730]: E0930 11:33:07.364457 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3b99846a-b02f-4eaa-9751-85c0edbdc167" containerName="extract-utilities" Sep 30 11:33:07 crc kubenswrapper[4730]: I0930 11:33:07.364470 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="3b99846a-b02f-4eaa-9751-85c0edbdc167" containerName="extract-utilities" Sep 30 11:33:07 crc kubenswrapper[4730]: E0930 11:33:07.364492 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d905d20-bacc-46e6-83c0-c7e685cf936d" containerName="extract-utilities" Sep 30 11:33:07 crc kubenswrapper[4730]: I0930 11:33:07.364497 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d905d20-bacc-46e6-83c0-c7e685cf936d" containerName="extract-utilities" Sep 30 11:33:07 crc kubenswrapper[4730]: E0930 11:33:07.364514 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d905d20-bacc-46e6-83c0-c7e685cf936d" containerName="extract-content" Sep 30 11:33:07 crc kubenswrapper[4730]: I0930 11:33:07.364520 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d905d20-bacc-46e6-83c0-c7e685cf936d" containerName="extract-content" Sep 30 11:33:07 crc kubenswrapper[4730]: E0930 11:33:07.364534 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3b99846a-b02f-4eaa-9751-85c0edbdc167" containerName="extract-content" Sep 30 11:33:07 crc kubenswrapper[4730]: I0930 11:33:07.364541 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="3b99846a-b02f-4eaa-9751-85c0edbdc167" containerName="extract-content" Sep 30 11:33:07 crc kubenswrapper[4730]: E0930 11:33:07.364553 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5d6d300c-5857-4de1-8317-cded656bc61e" containerName="tempest-tests-tempest-tests-runner" Sep 30 11:33:07 crc kubenswrapper[4730]: I0930 11:33:07.364560 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="5d6d300c-5857-4de1-8317-cded656bc61e" containerName="tempest-tests-tempest-tests-runner" Sep 30 11:33:07 crc kubenswrapper[4730]: E0930 11:33:07.364576 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d905d20-bacc-46e6-83c0-c7e685cf936d" containerName="registry-server" Sep 30 11:33:07 crc kubenswrapper[4730]: I0930 11:33:07.364584 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d905d20-bacc-46e6-83c0-c7e685cf936d" containerName="registry-server" Sep 30 11:33:07 crc 
kubenswrapper[4730]: E0930 11:33:07.364604 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3b99846a-b02f-4eaa-9751-85c0edbdc167" containerName="registry-server" Sep 30 11:33:07 crc kubenswrapper[4730]: I0930 11:33:07.364635 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="3b99846a-b02f-4eaa-9751-85c0edbdc167" containerName="registry-server" Sep 30 11:33:07 crc kubenswrapper[4730]: I0930 11:33:07.364826 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="0d905d20-bacc-46e6-83c0-c7e685cf936d" containerName="registry-server" Sep 30 11:33:07 crc kubenswrapper[4730]: I0930 11:33:07.364844 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="3b99846a-b02f-4eaa-9751-85c0edbdc167" containerName="registry-server" Sep 30 11:33:07 crc kubenswrapper[4730]: I0930 11:33:07.364858 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="5d6d300c-5857-4de1-8317-cded656bc61e" containerName="tempest-tests-tempest-tests-runner" Sep 30 11:33:07 crc kubenswrapper[4730]: I0930 11:33:07.365550 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Sep 30 11:33:07 crc kubenswrapper[4730]: I0930 11:33:07.370139 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-thzbw" Sep 30 11:33:07 crc kubenswrapper[4730]: I0930 11:33:07.381081 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Sep 30 11:33:07 crc kubenswrapper[4730]: I0930 11:33:07.469736 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"c10a38bd-1150-4f8a-b74d-5f6c7498387b\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Sep 30 11:33:07 crc kubenswrapper[4730]: I0930 11:33:07.469779 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kwr59\" (UniqueName: \"kubernetes.io/projected/c10a38bd-1150-4f8a-b74d-5f6c7498387b-kube-api-access-kwr59\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"c10a38bd-1150-4f8a-b74d-5f6c7498387b\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Sep 30 11:33:07 crc kubenswrapper[4730]: I0930 11:33:07.571584 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"c10a38bd-1150-4f8a-b74d-5f6c7498387b\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Sep 30 11:33:07 crc kubenswrapper[4730]: I0930 11:33:07.572047 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kwr59\" (UniqueName: \"kubernetes.io/projected/c10a38bd-1150-4f8a-b74d-5f6c7498387b-kube-api-access-kwr59\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"c10a38bd-1150-4f8a-b74d-5f6c7498387b\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Sep 30 11:33:07 crc kubenswrapper[4730]: I0930 11:33:07.572341 4730 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage06-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"c10a38bd-1150-4f8a-b74d-5f6c7498387b\") device mount path \"/mnt/openstack/pv06\"" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Sep 30 11:33:07 crc kubenswrapper[4730]: I0930 11:33:07.590825 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kwr59\" (UniqueName: \"kubernetes.io/projected/c10a38bd-1150-4f8a-b74d-5f6c7498387b-kube-api-access-kwr59\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"c10a38bd-1150-4f8a-b74d-5f6c7498387b\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Sep 30 11:33:07 crc kubenswrapper[4730]: I0930 11:33:07.614491 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"c10a38bd-1150-4f8a-b74d-5f6c7498387b\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Sep 30 11:33:07 crc kubenswrapper[4730]: I0930 11:33:07.700264 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Sep 30 11:33:08 crc kubenswrapper[4730]: I0930 11:33:08.162638 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Sep 30 11:33:08 crc kubenswrapper[4730]: I0930 11:33:08.962715 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"c10a38bd-1150-4f8a-b74d-5f6c7498387b","Type":"ContainerStarted","Data":"b5241110e8dbfabf94774ce62d3cde24a9d1bca8a5d3a0d6706e3b93605d45f0"} Sep 30 11:33:09 crc kubenswrapper[4730]: I0930 11:33:09.979956 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"c10a38bd-1150-4f8a-b74d-5f6c7498387b","Type":"ContainerStarted","Data":"3d8e2ae51e689869eb223f20ed80b687fb670000e36253382cbee69be3bf0015"} Sep 30 11:33:09 crc kubenswrapper[4730]: I0930 11:33:09.993315 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" podStartSLOduration=1.800819892 podStartE2EDuration="2.993291633s" podCreationTimestamp="2025-09-30 11:33:07 +0000 UTC" firstStartedPulling="2025-09-30 11:33:08.169350565 +0000 UTC m=+6232.502610558" lastFinishedPulling="2025-09-30 11:33:09.361822296 +0000 UTC m=+6233.695082299" observedRunningTime="2025-09-30 11:33:09.991882956 +0000 UTC m=+6234.325142959" watchObservedRunningTime="2025-09-30 11:33:09.993291633 +0000 UTC m=+6234.326551636" Sep 30 11:33:28 crc kubenswrapper[4730]: I0930 11:33:28.403766 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-6kb84/must-gather-8d298"] Sep 30 11:33:28 crc kubenswrapper[4730]: I0930 11:33:28.405713 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-6kb84/must-gather-8d298" Sep 30 11:33:28 crc kubenswrapper[4730]: I0930 11:33:28.407272 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-6kb84"/"kube-root-ca.crt" Sep 30 11:33:28 crc kubenswrapper[4730]: I0930 11:33:28.408019 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-6kb84"/"default-dockercfg-wj67d" Sep 30 11:33:28 crc kubenswrapper[4730]: I0930 11:33:28.413625 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-6kb84/must-gather-8d298"] Sep 30 11:33:28 crc kubenswrapper[4730]: I0930 11:33:28.413896 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-6kb84"/"openshift-service-ca.crt" Sep 30 11:33:28 crc kubenswrapper[4730]: I0930 11:33:28.514660 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4dnxw\" (UniqueName: \"kubernetes.io/projected/242561f4-5dfa-49fa-8c0b-4e6db0342923-kube-api-access-4dnxw\") pod \"must-gather-8d298\" (UID: \"242561f4-5dfa-49fa-8c0b-4e6db0342923\") " pod="openshift-must-gather-6kb84/must-gather-8d298" Sep 30 11:33:28 crc kubenswrapper[4730]: I0930 11:33:28.514924 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/242561f4-5dfa-49fa-8c0b-4e6db0342923-must-gather-output\") pod \"must-gather-8d298\" (UID: \"242561f4-5dfa-49fa-8c0b-4e6db0342923\") " pod="openshift-must-gather-6kb84/must-gather-8d298" Sep 30 11:33:28 crc kubenswrapper[4730]: I0930 11:33:28.616893 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/242561f4-5dfa-49fa-8c0b-4e6db0342923-must-gather-output\") pod \"must-gather-8d298\" (UID: \"242561f4-5dfa-49fa-8c0b-4e6db0342923\") " pod="openshift-must-gather-6kb84/must-gather-8d298" Sep 30 11:33:28 crc kubenswrapper[4730]: I0930 11:33:28.617012 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4dnxw\" (UniqueName: \"kubernetes.io/projected/242561f4-5dfa-49fa-8c0b-4e6db0342923-kube-api-access-4dnxw\") pod \"must-gather-8d298\" (UID: \"242561f4-5dfa-49fa-8c0b-4e6db0342923\") " pod="openshift-must-gather-6kb84/must-gather-8d298" Sep 30 11:33:28 crc kubenswrapper[4730]: I0930 11:33:28.617698 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/242561f4-5dfa-49fa-8c0b-4e6db0342923-must-gather-output\") pod \"must-gather-8d298\" (UID: \"242561f4-5dfa-49fa-8c0b-4e6db0342923\") " pod="openshift-must-gather-6kb84/must-gather-8d298" Sep 30 11:33:28 crc kubenswrapper[4730]: I0930 11:33:28.645763 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4dnxw\" (UniqueName: \"kubernetes.io/projected/242561f4-5dfa-49fa-8c0b-4e6db0342923-kube-api-access-4dnxw\") pod \"must-gather-8d298\" (UID: \"242561f4-5dfa-49fa-8c0b-4e6db0342923\") " pod="openshift-must-gather-6kb84/must-gather-8d298" Sep 30 11:33:28 crc kubenswrapper[4730]: I0930 11:33:28.721878 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-6kb84/must-gather-8d298" Sep 30 11:33:29 crc kubenswrapper[4730]: I0930 11:33:29.171370 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-6kb84/must-gather-8d298"] Sep 30 11:33:29 crc kubenswrapper[4730]: I0930 11:33:29.177787 4730 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 30 11:33:29 crc kubenswrapper[4730]: I0930 11:33:29.193263 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-6kb84/must-gather-8d298" event={"ID":"242561f4-5dfa-49fa-8c0b-4e6db0342923","Type":"ContainerStarted","Data":"f30760953bc3bb3abfed4945b564efe77f300750d36338aab93e1682b290046b"} Sep 30 11:33:32 crc kubenswrapper[4730]: I0930 11:33:32.337228 4730 patch_prober.go:28] interesting pod/machine-config-daemon-d4zf9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 11:33:32 crc kubenswrapper[4730]: I0930 11:33:32.337556 4730 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 11:33:36 crc kubenswrapper[4730]: I0930 11:33:36.265669 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-6kb84/must-gather-8d298" event={"ID":"242561f4-5dfa-49fa-8c0b-4e6db0342923","Type":"ContainerStarted","Data":"effa99d252a63927551e37d4e045c36c051d3611b2911525b9a684592b0d1200"} Sep 30 11:33:36 crc kubenswrapper[4730]: I0930 11:33:36.266173 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-6kb84/must-gather-8d298" event={"ID":"242561f4-5dfa-49fa-8c0b-4e6db0342923","Type":"ContainerStarted","Data":"384896d208e80bdbbad7fb038bef4d5f8515d13173fedb37b33513bfbd02c07e"} Sep 30 11:33:36 crc kubenswrapper[4730]: I0930 11:33:36.290042 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-6kb84/must-gather-8d298" podStartSLOduration=2.163513479 podStartE2EDuration="8.290016815s" podCreationTimestamp="2025-09-30 11:33:28 +0000 UTC" firstStartedPulling="2025-09-30 11:33:29.177581526 +0000 UTC m=+6253.510841519" lastFinishedPulling="2025-09-30 11:33:35.304084852 +0000 UTC m=+6259.637344855" observedRunningTime="2025-09-30 11:33:36.279709592 +0000 UTC m=+6260.612969655" watchObservedRunningTime="2025-09-30 11:33:36.290016815 +0000 UTC m=+6260.623276838" Sep 30 11:33:39 crc kubenswrapper[4730]: I0930 11:33:39.647342 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-6kb84/crc-debug-j44np"] Sep 30 11:33:39 crc kubenswrapper[4730]: I0930 11:33:39.649200 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-6kb84/crc-debug-j44np" Sep 30 11:33:39 crc kubenswrapper[4730]: I0930 11:33:39.764372 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qhrkj\" (UniqueName: \"kubernetes.io/projected/30d121c5-00a1-4dc2-997f-18abf73f5ab5-kube-api-access-qhrkj\") pod \"crc-debug-j44np\" (UID: \"30d121c5-00a1-4dc2-997f-18abf73f5ab5\") " pod="openshift-must-gather-6kb84/crc-debug-j44np" Sep 30 11:33:39 crc kubenswrapper[4730]: I0930 11:33:39.764743 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/30d121c5-00a1-4dc2-997f-18abf73f5ab5-host\") pod \"crc-debug-j44np\" (UID: \"30d121c5-00a1-4dc2-997f-18abf73f5ab5\") " pod="openshift-must-gather-6kb84/crc-debug-j44np" Sep 30 11:33:39 crc kubenswrapper[4730]: I0930 11:33:39.866038 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/30d121c5-00a1-4dc2-997f-18abf73f5ab5-host\") pod \"crc-debug-j44np\" (UID: \"30d121c5-00a1-4dc2-997f-18abf73f5ab5\") " pod="openshift-must-gather-6kb84/crc-debug-j44np" Sep 30 11:33:39 crc kubenswrapper[4730]: I0930 11:33:39.866244 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qhrkj\" (UniqueName: \"kubernetes.io/projected/30d121c5-00a1-4dc2-997f-18abf73f5ab5-kube-api-access-qhrkj\") pod \"crc-debug-j44np\" (UID: \"30d121c5-00a1-4dc2-997f-18abf73f5ab5\") " pod="openshift-must-gather-6kb84/crc-debug-j44np" Sep 30 11:33:39 crc kubenswrapper[4730]: I0930 11:33:39.868414 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/30d121c5-00a1-4dc2-997f-18abf73f5ab5-host\") pod \"crc-debug-j44np\" (UID: \"30d121c5-00a1-4dc2-997f-18abf73f5ab5\") " pod="openshift-must-gather-6kb84/crc-debug-j44np" Sep 30 11:33:39 crc kubenswrapper[4730]: I0930 11:33:39.894040 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qhrkj\" (UniqueName: \"kubernetes.io/projected/30d121c5-00a1-4dc2-997f-18abf73f5ab5-kube-api-access-qhrkj\") pod \"crc-debug-j44np\" (UID: \"30d121c5-00a1-4dc2-997f-18abf73f5ab5\") " pod="openshift-must-gather-6kb84/crc-debug-j44np" Sep 30 11:33:39 crc kubenswrapper[4730]: I0930 11:33:39.970792 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-6kb84/crc-debug-j44np" Sep 30 11:33:40 crc kubenswrapper[4730]: I0930 11:33:40.304384 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-6kb84/crc-debug-j44np" event={"ID":"30d121c5-00a1-4dc2-997f-18abf73f5ab5","Type":"ContainerStarted","Data":"e9d9b5bd2427caa34b5c35a9799d4b56113de9a9c8483eba2737b0cfe9665f83"} Sep 30 11:33:51 crc kubenswrapper[4730]: I0930 11:33:51.405050 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-6kb84/crc-debug-j44np" event={"ID":"30d121c5-00a1-4dc2-997f-18abf73f5ab5","Type":"ContainerStarted","Data":"0a96abf9fa9016e630601a8bcd7708f411f22d5dedf8350200c67e7ab47af270"} Sep 30 11:33:51 crc kubenswrapper[4730]: I0930 11:33:51.426558 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-6kb84/crc-debug-j44np" podStartSLOduration=1.985381312 podStartE2EDuration="12.426537664s" podCreationTimestamp="2025-09-30 11:33:39 +0000 UTC" firstStartedPulling="2025-09-30 11:33:40.018950114 +0000 UTC m=+6264.352210127" lastFinishedPulling="2025-09-30 11:33:50.460106486 +0000 UTC m=+6274.793366479" observedRunningTime="2025-09-30 11:33:51.41690009 +0000 UTC m=+6275.750160083" watchObservedRunningTime="2025-09-30 11:33:51.426537664 +0000 UTC m=+6275.759797677" Sep 30 11:34:02 crc kubenswrapper[4730]: I0930 11:34:02.336926 4730 patch_prober.go:28] interesting pod/machine-config-daemon-d4zf9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 11:34:02 crc kubenswrapper[4730]: I0930 11:34:02.337467 4730 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 11:34:32 crc kubenswrapper[4730]: I0930 11:34:32.336458 4730 patch_prober.go:28] interesting pod/machine-config-daemon-d4zf9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 11:34:32 crc kubenswrapper[4730]: I0930 11:34:32.337029 4730 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 11:34:32 crc kubenswrapper[4730]: I0930 11:34:32.337082 4730 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" Sep 30 11:34:32 crc kubenswrapper[4730]: I0930 11:34:32.337689 4730 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"1ca4e75765a6b0ce8ace2aa5038325030430c2be92cc16d25c232bed60293e3b"} pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 11:34:32 crc kubenswrapper[4730]: I0930 11:34:32.337744 4730 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerName="machine-config-daemon" containerID="cri-o://1ca4e75765a6b0ce8ace2aa5038325030430c2be92cc16d25c232bed60293e3b" gracePeriod=600 Sep 30 11:34:32 crc kubenswrapper[4730]: E0930 11:34:32.461552 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 11:34:32 crc kubenswrapper[4730]: I0930 11:34:32.812721 4730 generic.go:334] "Generic (PLEG): container finished" podID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerID="1ca4e75765a6b0ce8ace2aa5038325030430c2be92cc16d25c232bed60293e3b" exitCode=0 Sep 30 11:34:32 crc kubenswrapper[4730]: I0930 11:34:32.812798 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" event={"ID":"95bd4436-8399-478d-9552-c9ba5ae8f327","Type":"ContainerDied","Data":"1ca4e75765a6b0ce8ace2aa5038325030430c2be92cc16d25c232bed60293e3b"} Sep 30 11:34:32 crc kubenswrapper[4730]: I0930 11:34:32.813088 4730 scope.go:117] "RemoveContainer" containerID="949efe096ebda614ec0ed66c9871e458cfa45addba67825624556af6913f3633" Sep 30 11:34:32 crc kubenswrapper[4730]: I0930 11:34:32.813893 4730 scope.go:117] "RemoveContainer" containerID="1ca4e75765a6b0ce8ace2aa5038325030430c2be92cc16d25c232bed60293e3b" Sep 30 11:34:32 crc kubenswrapper[4730]: E0930 11:34:32.814249 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 11:34:46 crc kubenswrapper[4730]: I0930 11:34:46.388560 4730 scope.go:117] "RemoveContainer" containerID="1ca4e75765a6b0ce8ace2aa5038325030430c2be92cc16d25c232bed60293e3b" Sep 30 11:34:46 crc kubenswrapper[4730]: E0930 11:34:46.389867 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 11:34:59 crc kubenswrapper[4730]: I0930 11:34:59.382028 4730 scope.go:117] "RemoveContainer" containerID="1ca4e75765a6b0ce8ace2aa5038325030430c2be92cc16d25c232bed60293e3b" Sep 30 11:34:59 crc kubenswrapper[4730]: E0930 11:34:59.382783 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 11:35:04 crc kubenswrapper[4730]: I0930 11:35:04.566735 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-54cc89d54-v6hb5_857c5dfa-3085-497b-8466-96eefd60c85d/barbican-api/0.log" Sep 30 11:35:04 crc kubenswrapper[4730]: I0930 11:35:04.747126 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-54cc89d54-v6hb5_857c5dfa-3085-497b-8466-96eefd60c85d/barbican-api-log/0.log" Sep 30 11:35:05 crc kubenswrapper[4730]: I0930 11:35:05.119413 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-6cdddb844-vsgsp_1018e1cd-c432-45b3-8267-0f37607cff2f/barbican-keystone-listener/0.log" Sep 30 11:35:05 crc kubenswrapper[4730]: I0930 11:35:05.365944 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-6cdddb844-vsgsp_1018e1cd-c432-45b3-8267-0f37607cff2f/barbican-keystone-listener-log/0.log" Sep 30 11:35:05 crc kubenswrapper[4730]: I0930 11:35:05.717714 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-665f7f89df-b6qmw_87696e9f-ed08-459f-80b7-c4c5499e4157/barbican-worker/0.log" Sep 30 11:35:05 crc kubenswrapper[4730]: I0930 11:35:05.771345 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-665f7f89df-b6qmw_87696e9f-ed08-459f-80b7-c4c5499e4157/barbican-worker-log/0.log" Sep 30 11:35:05 crc kubenswrapper[4730]: I0930 11:35:05.984528 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-edpm-deployment-openstack-edpm-ipam-2qpzs_96923f77-1ffc-4d73-adf3-33f66499e0f9/bootstrap-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 11:35:06 crc kubenswrapper[4730]: I0930 11:35:06.233302 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_4eb0bfb8-0c6a-4d5f-9b85-e06888511203/ceilometer-central-agent/0.log" Sep 30 11:35:06 crc kubenswrapper[4730]: I0930 11:35:06.345671 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_4eb0bfb8-0c6a-4d5f-9b85-e06888511203/ceilometer-notification-agent/0.log" Sep 30 11:35:06 crc kubenswrapper[4730]: I0930 11:35:06.444235 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_4eb0bfb8-0c6a-4d5f-9b85-e06888511203/proxy-httpd/0.log" Sep 30 11:35:06 crc kubenswrapper[4730]: I0930 11:35:06.545332 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_4eb0bfb8-0c6a-4d5f-9b85-e06888511203/sg-core/0.log" Sep 30 11:35:06 crc kubenswrapper[4730]: I0930 11:35:06.709466 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceph-client-edpm-deployment-openstack-edpm-ipam-dlnrw_d0e46e4d-3e83-4381-b519-f840ffb1911b/ceph-client-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 11:35:06 crc kubenswrapper[4730]: I0930 11:35:06.904278 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-cwj6s_15b0b939-744d-4ea8-8ff3-942843d32348/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 11:35:07 crc kubenswrapper[4730]: I0930 11:35:07.242220 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_063272c9-558f-47d6-84c0-aa0de64bb715/cinder-api-log/0.log" Sep 30 11:35:08 crc kubenswrapper[4730]: I0930 11:35:08.136331 4730 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_cinder-api-0_063272c9-558f-47d6-84c0-aa0de64bb715/cinder-api/0.log" Sep 30 11:35:08 crc kubenswrapper[4730]: I0930 11:35:08.261847 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-backup-0_0983f2ba-1e23-492a-abb5-5cc4f4199925/cinder-backup/0.log" Sep 30 11:35:08 crc kubenswrapper[4730]: I0930 11:35:08.336367 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-backup-0_0983f2ba-1e23-492a-abb5-5cc4f4199925/probe/0.log" Sep 30 11:35:08 crc kubenswrapper[4730]: I0930 11:35:08.504871 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_a7f343db-0bd4-4e21-ae2d-0b61a4aa0e09/cinder-scheduler/0.log" Sep 30 11:35:08 crc kubenswrapper[4730]: I0930 11:35:08.706224 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_a7f343db-0bd4-4e21-ae2d-0b61a4aa0e09/probe/0.log" Sep 30 11:35:08 crc kubenswrapper[4730]: I0930 11:35:08.911855 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-volume-volume1-0_ad078102-347b-4f85-8fa5-f83cbf35c06a/probe/0.log" Sep 30 11:35:09 crc kubenswrapper[4730]: I0930 11:35:09.047249 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-volume-volume1-0_ad078102-347b-4f85-8fa5-f83cbf35c06a/cinder-volume/0.log" Sep 30 11:35:09 crc kubenswrapper[4730]: I0930 11:35:09.351769 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-volume-volume2-0_12c02e5f-fb4a-46e7-8772-07bbe148bdcd/probe/0.log" Sep 30 11:35:09 crc kubenswrapper[4730]: I0930 11:35:09.391236 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-volume-volume2-0_12c02e5f-fb4a-46e7-8772-07bbe148bdcd/cinder-volume/0.log" Sep 30 11:35:09 crc kubenswrapper[4730]: I0930 11:35:09.537684 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-edpm-deployment-openstack-edpm-ipam-jrhtl_076a53c1-4f43-4c11-b67a-163d1fe06287/configure-network-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 11:35:09 crc kubenswrapper[4730]: I0930 11:35:09.738228 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-mtwx8_64074625-ee39-4163-afbf-bc8e220b63e7/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 11:35:09 crc kubenswrapper[4730]: I0930 11:35:09.900504 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-5c4f8cc8c-r8gbc_7562489e-f18e-470f-a208-8479d49513f9/init/0.log" Sep 30 11:35:10 crc kubenswrapper[4730]: I0930 11:35:10.081450 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-5c4f8cc8c-r8gbc_7562489e-f18e-470f-a208-8479d49513f9/init/0.log" Sep 30 11:35:10 crc kubenswrapper[4730]: I0930 11:35:10.380439 4730 scope.go:117] "RemoveContainer" containerID="1ca4e75765a6b0ce8ace2aa5038325030430c2be92cc16d25c232bed60293e3b" Sep 30 11:35:10 crc kubenswrapper[4730]: E0930 11:35:10.380753 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 11:35:10 crc kubenswrapper[4730]: I0930 11:35:10.524435 4730 log.go:25] 
"Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_e5bde710-0351-45d0-acb8-990719f9ba34/glance-httpd/0.log" Sep 30 11:35:10 crc kubenswrapper[4730]: I0930 11:35:10.666760 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-5c4f8cc8c-r8gbc_7562489e-f18e-470f-a208-8479d49513f9/dnsmasq-dns/0.log" Sep 30 11:35:10 crc kubenswrapper[4730]: I0930 11:35:10.681069 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_e5bde710-0351-45d0-acb8-990719f9ba34/glance-log/0.log" Sep 30 11:35:10 crc kubenswrapper[4730]: I0930 11:35:10.870882 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_8532ec07-5ee5-40c9-82f5-df62806d03f5/glance-httpd/0.log" Sep 30 11:35:10 crc kubenswrapper[4730]: I0930 11:35:10.981886 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_8532ec07-5ee5-40c9-82f5-df62806d03f5/glance-log/0.log" Sep 30 11:35:11 crc kubenswrapper[4730]: I0930 11:35:11.281577 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-6b9f68988b-b4q58_73859337-4ff6-4ada-bc9b-a29b6b1fc478/horizon/0.log" Sep 30 11:35:11 crc kubenswrapper[4730]: I0930 11:35:11.715230 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-edpm-deployment-openstack-edpm-ipam-47s8l_c191d318-9d8c-4aac-bbc9-371553bb29bf/install-certs-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 11:35:11 crc kubenswrapper[4730]: I0930 11:35:11.792911 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-6b9f68988b-b4q58_73859337-4ff6-4ada-bc9b-a29b6b1fc478/horizon-log/0.log" Sep 30 11:35:11 crc kubenswrapper[4730]: I0930 11:35:11.961499 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-edpm-deployment-openstack-edpm-ipam-xhgzq_9e488e17-15bf-414e-b0cb-e5b3dbf22769/install-os-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 11:35:12 crc kubenswrapper[4730]: I0930 11:35:12.551121 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29320501-wlbqz_8a0c7d59-5820-461e-a4ee-7ff69b8feadd/keystone-cron/0.log" Sep 30 11:35:12 crc kubenswrapper[4730]: I0930 11:35:12.586858 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-7dfc9d946-psk9c_fdc671fd-529e-484c-9924-355c64d393ff/keystone-api/0.log" Sep 30 11:35:12 crc kubenswrapper[4730]: I0930 11:35:12.713006 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_5c0ac696-252a-4b32-8086-a6d3a02e945f/kube-state-metrics/0.log" Sep 30 11:35:12 crc kubenswrapper[4730]: I0930 11:35:12.939787 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-edpm-deployment-openstack-edpm-ipam-qcc5h_114c62cf-b040-491e-90fa-794b4cc29361/libvirt-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 11:35:13 crc kubenswrapper[4730]: I0930 11:35:13.692621 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-9cf79fcd5-4nrfn_2cb5a17e-0f48-4341-9a0e-9c84e63fed3b/neutron-api/0.log" Sep 30 11:35:13 crc kubenswrapper[4730]: I0930 11:35:13.803886 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-9cf79fcd5-4nrfn_2cb5a17e-0f48-4341-9a0e-9c84e63fed3b/neutron-httpd/0.log" Sep 30 11:35:14 crc kubenswrapper[4730]: I0930 11:35:14.017217 4730 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_neutron-metadata-edpm-deployment-openstack-edpm-ipam-xtwz8_25b53534-c380-44dd-aa82-22606c2a5d22/neutron-metadata-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 11:35:16 crc kubenswrapper[4730]: I0930 11:35:16.053659 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_0332adf1-8148-4923-9273-1ef8869dfad1/nova-api-log/0.log" Sep 30 11:35:16 crc kubenswrapper[4730]: I0930 11:35:16.101434 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_0332adf1-8148-4923-9273-1ef8869dfad1/nova-api-api/0.log" Sep 30 11:35:16 crc kubenswrapper[4730]: I0930 11:35:16.618076 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_4b30e43c-c603-4ad2-a95b-8a8a1ac5a46a/nova-cell0-conductor-conductor/0.log" Sep 30 11:35:16 crc kubenswrapper[4730]: I0930 11:35:16.912652 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_4521e8b6-3634-4d50-9050-0fccded8d973/nova-cell1-conductor-conductor/0.log" Sep 30 11:35:17 crc kubenswrapper[4730]: I0930 11:35:17.150391 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_ed5ae6a3-8d5a-4fd9-8de9-380e7a54ce4c/nova-cell1-novncproxy-novncproxy/0.log" Sep 30 11:35:17 crc kubenswrapper[4730]: I0930 11:35:17.635214 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-mkqvk_80c10bdf-95bb-4372-ba25-b7bd5f563225/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 11:35:17 crc kubenswrapper[4730]: I0930 11:35:17.782165 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_a3c8895e-0bd8-4e06-a121-3afe3bcdf54f/nova-metadata-log/0.log" Sep 30 11:35:18 crc kubenswrapper[4730]: I0930 11:35:18.314822 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_c1e0179d-6dc3-4dec-8ff6-48c794add5a3/mysql-bootstrap/0.log" Sep 30 11:35:18 crc kubenswrapper[4730]: I0930 11:35:18.388170 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_fca33d2f-4ffc-40bf-a02b-f5f757ca2d1f/nova-scheduler-scheduler/0.log" Sep 30 11:35:18 crc kubenswrapper[4730]: I0930 11:35:18.522821 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_c1e0179d-6dc3-4dec-8ff6-48c794add5a3/mysql-bootstrap/0.log" Sep 30 11:35:18 crc kubenswrapper[4730]: I0930 11:35:18.678097 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_c1e0179d-6dc3-4dec-8ff6-48c794add5a3/galera/0.log" Sep 30 11:35:18 crc kubenswrapper[4730]: I0930 11:35:18.915028 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_28cd8cfe-8aa0-4ba3-bbfc-a8475c534fa1/mysql-bootstrap/0.log" Sep 30 11:35:19 crc kubenswrapper[4730]: I0930 11:35:19.126021 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_28cd8cfe-8aa0-4ba3-bbfc-a8475c534fa1/mysql-bootstrap/0.log" Sep 30 11:35:19 crc kubenswrapper[4730]: I0930 11:35:19.154563 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_28cd8cfe-8aa0-4ba3-bbfc-a8475c534fa1/galera/0.log" Sep 30 11:35:19 crc kubenswrapper[4730]: I0930 11:35:19.398391 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_7177e538-cc55-44d5-9274-67a54b79f589/openstackclient/0.log" Sep 30 
11:35:19 crc kubenswrapper[4730]: I0930 11:35:19.580991 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-4tlvb_d247bfe5-48d7-49be-9cd4-2d3368015e3a/ovn-controller/0.log" Sep 30 11:35:19 crc kubenswrapper[4730]: I0930 11:35:19.815313 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-zh8k5_f609f208-0734-4364-b05e-0364bded655e/openstack-network-exporter/0.log" Sep 30 11:35:20 crc kubenswrapper[4730]: I0930 11:35:20.065187 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-wb9fw_43c558ac-76c0-4c01-a265-41320a386add/ovsdb-server-init/0.log" Sep 30 11:35:20 crc kubenswrapper[4730]: I0930 11:35:20.253728 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-wb9fw_43c558ac-76c0-4c01-a265-41320a386add/ovsdb-server-init/0.log" Sep 30 11:35:20 crc kubenswrapper[4730]: I0930 11:35:20.445165 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-wb9fw_43c558ac-76c0-4c01-a265-41320a386add/ovsdb-server/0.log" Sep 30 11:35:20 crc kubenswrapper[4730]: I0930 11:35:20.705593 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-wb9fw_43c558ac-76c0-4c01-a265-41320a386add/ovs-vswitchd/0.log" Sep 30 11:35:20 crc kubenswrapper[4730]: I0930 11:35:20.745249 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_a3c8895e-0bd8-4e06-a121-3afe3bcdf54f/nova-metadata-metadata/0.log" Sep 30 11:35:20 crc kubenswrapper[4730]: I0930 11:35:20.949508 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-br6q6_ae1fcc30-65e0-4f9d-9ffa-bd87c1effd02/ovn-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 11:35:20 crc kubenswrapper[4730]: I0930 11:35:20.957028 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_841c63b2-fda6-4269-8ba8-8567555326b4/openstack-network-exporter/0.log" Sep 30 11:35:21 crc kubenswrapper[4730]: I0930 11:35:21.139214 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_841c63b2-fda6-4269-8ba8-8567555326b4/ovn-northd/0.log" Sep 30 11:35:21 crc kubenswrapper[4730]: I0930 11:35:21.165750 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_d84e120e-743b-4797-aa0a-e231ecfa59ab/openstack-network-exporter/0.log" Sep 30 11:35:21 crc kubenswrapper[4730]: I0930 11:35:21.317213 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_d84e120e-743b-4797-aa0a-e231ecfa59ab/ovsdbserver-nb/0.log" Sep 30 11:35:21 crc kubenswrapper[4730]: I0930 11:35:21.444575 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_5e6063f5-43cd-45ec-9ac3-4de0fd55cb15/openstack-network-exporter/0.log" Sep 30 11:35:21 crc kubenswrapper[4730]: I0930 11:35:21.611860 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_5e6063f5-43cd-45ec-9ac3-4de0fd55cb15/ovsdbserver-sb/0.log" Sep 30 11:35:21 crc kubenswrapper[4730]: I0930 11:35:21.844505 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-7d48d7c7fd-7l8hx_57cdca52-f1f9-48c7-8fb6-9144a033c957/placement-api/0.log" Sep 30 11:35:22 crc kubenswrapper[4730]: I0930 11:35:22.065573 4730 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_placement-7d48d7c7fd-7l8hx_57cdca52-f1f9-48c7-8fb6-9144a033c957/placement-log/0.log" Sep 30 11:35:22 crc kubenswrapper[4730]: I0930 11:35:22.306769 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_aae3dec7-c6e3-4bd3-ad48-96d4d959d228/init-config-reloader/0.log" Sep 30 11:35:22 crc kubenswrapper[4730]: I0930 11:35:22.386111 4730 scope.go:117] "RemoveContainer" containerID="1ca4e75765a6b0ce8ace2aa5038325030430c2be92cc16d25c232bed60293e3b" Sep 30 11:35:22 crc kubenswrapper[4730]: E0930 11:35:22.386450 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 11:35:22 crc kubenswrapper[4730]: I0930 11:35:22.488605 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_aae3dec7-c6e3-4bd3-ad48-96d4d959d228/init-config-reloader/0.log" Sep 30 11:35:22 crc kubenswrapper[4730]: I0930 11:35:22.503489 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_aae3dec7-c6e3-4bd3-ad48-96d4d959d228/config-reloader/0.log" Sep 30 11:35:22 crc kubenswrapper[4730]: I0930 11:35:22.544156 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_aae3dec7-c6e3-4bd3-ad48-96d4d959d228/prometheus/0.log" Sep 30 11:35:22 crc kubenswrapper[4730]: I0930 11:35:22.795193 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_aae3dec7-c6e3-4bd3-ad48-96d4d959d228/thanos-sidecar/0.log" Sep 30 11:35:22 crc kubenswrapper[4730]: I0930 11:35:22.799247 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_a3b79a67-6ca7-44cd-8108-9afb64437809/setup-container/0.log" Sep 30 11:35:23 crc kubenswrapper[4730]: I0930 11:35:23.032778 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_a3b79a67-6ca7-44cd-8108-9afb64437809/setup-container/0.log" Sep 30 11:35:23 crc kubenswrapper[4730]: I0930 11:35:23.036119 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_a3b79a67-6ca7-44cd-8108-9afb64437809/rabbitmq/0.log" Sep 30 11:35:23 crc kubenswrapper[4730]: I0930 11:35:23.237775 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-notifications-server-0_7ba6b518-edfa-4d19-b096-03d7d96c51a3/setup-container/0.log" Sep 30 11:35:23 crc kubenswrapper[4730]: I0930 11:35:23.400310 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-notifications-server-0_7ba6b518-edfa-4d19-b096-03d7d96c51a3/setup-container/0.log" Sep 30 11:35:23 crc kubenswrapper[4730]: I0930 11:35:23.427733 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-notifications-server-0_7ba6b518-edfa-4d19-b096-03d7d96c51a3/rabbitmq/0.log" Sep 30 11:35:23 crc kubenswrapper[4730]: I0930 11:35:23.602532 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_a1a78aec-c35b-41c6-a1e0-43fba77e84fd/setup-container/0.log" Sep 30 11:35:23 crc kubenswrapper[4730]: I0930 11:35:23.808012 4730 log.go:25] 
"Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_a1a78aec-c35b-41c6-a1e0-43fba77e84fd/setup-container/0.log" Sep 30 11:35:23 crc kubenswrapper[4730]: I0930 11:35:23.841504 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_a1a78aec-c35b-41c6-a1e0-43fba77e84fd/rabbitmq/0.log" Sep 30 11:35:24 crc kubenswrapper[4730]: I0930 11:35:24.007515 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-edpm-deployment-openstack-edpm-ipam-ztvhs_b5e99fb9-fd07-410f-8c9b-bde6849b5655/reboot-os-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 11:35:24 crc kubenswrapper[4730]: I0930 11:35:24.055829 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-wqj6f_9c876aea-c4ac-4055-953d-9bedb3615be5/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 11:35:24 crc kubenswrapper[4730]: I0930 11:35:24.317888 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-edpm-deployment-openstack-edpm-ipam-9xt9m_cda7e260-a520-4ac4-a1f4-b8e7684d2742/run-os-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 11:35:24 crc kubenswrapper[4730]: I0930 11:35:24.510382 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-edpm-deployment-2p45l_9a25d6ab-b556-4812-b876-92f7574e6da9/ssh-known-hosts-edpm-deployment/0.log" Sep 30 11:35:24 crc kubenswrapper[4730]: I0930 11:35:24.695925 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_telemetry-edpm-deployment-openstack-edpm-ipam-plcgw_9a9e7f46-a278-48e1-9171-826bbba2fe2b/telemetry-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 11:35:24 crc kubenswrapper[4730]: I0930 11:35:24.803686 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tempest-tests-tempest_5d6d300c-5857-4de1-8317-cded656bc61e/tempest-tests-tempest-tests-runner/0.log" Sep 30 11:35:24 crc kubenswrapper[4730]: I0930 11:35:24.958308 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_test-operator-logs-pod-tempest-tempest-tests-tempest_c10a38bd-1150-4f8a-b74d-5f6c7498387b/test-operator-logs-container/0.log" Sep 30 11:35:25 crc kubenswrapper[4730]: I0930 11:35:25.163342 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-edpm-deployment-openstack-edpm-ipam-jsrw2_60081e7d-07fd-48ac-a4ae-46f05ab4d935/validate-network-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 11:35:26 crc kubenswrapper[4730]: I0930 11:35:26.320823 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_watcher-applier-0_db8ecf95-5b86-4775-85ee-a3da046e9dba/watcher-applier/0.log" Sep 30 11:35:26 crc kubenswrapper[4730]: I0930 11:35:26.562429 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_watcher-api-0_4caba7a4-0751-4410-88f1-084d5289d1c6/watcher-api-log/0.log" Sep 30 11:35:26 crc kubenswrapper[4730]: I0930 11:35:26.838244 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_watcher-decision-engine-0_a4f9bd21-5f86-4443-87be-eadb5d1c77f9/watcher-decision-engine/2.log" Sep 30 11:35:30 crc kubenswrapper[4730]: I0930 11:35:30.814149 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_watcher-decision-engine-0_a4f9bd21-5f86-4443-87be-eadb5d1c77f9/watcher-decision-engine/3.log" Sep 30 11:35:31 crc kubenswrapper[4730]: I0930 11:35:31.377454 4730 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_watcher-api-0_4caba7a4-0751-4410-88f1-084d5289d1c6/watcher-api/0.log" Sep 30 11:35:32 crc kubenswrapper[4730]: I0930 11:35:32.849013 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_c7ad8423-527a-4195-8e63-d04e2bed66c9/memcached/0.log" Sep 30 11:35:37 crc kubenswrapper[4730]: I0930 11:35:37.381310 4730 scope.go:117] "RemoveContainer" containerID="1ca4e75765a6b0ce8ace2aa5038325030430c2be92cc16d25c232bed60293e3b" Sep 30 11:35:37 crc kubenswrapper[4730]: E0930 11:35:37.382098 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 11:35:48 crc kubenswrapper[4730]: I0930 11:35:48.385746 4730 scope.go:117] "RemoveContainer" containerID="1ca4e75765a6b0ce8ace2aa5038325030430c2be92cc16d25c232bed60293e3b" Sep 30 11:35:48 crc kubenswrapper[4730]: E0930 11:35:48.386444 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 11:35:57 crc kubenswrapper[4730]: I0930 11:35:57.683078 4730 generic.go:334] "Generic (PLEG): container finished" podID="30d121c5-00a1-4dc2-997f-18abf73f5ab5" containerID="0a96abf9fa9016e630601a8bcd7708f411f22d5dedf8350200c67e7ab47af270" exitCode=0 Sep 30 11:35:57 crc kubenswrapper[4730]: I0930 11:35:57.683186 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-6kb84/crc-debug-j44np" event={"ID":"30d121c5-00a1-4dc2-997f-18abf73f5ab5","Type":"ContainerDied","Data":"0a96abf9fa9016e630601a8bcd7708f411f22d5dedf8350200c67e7ab47af270"} Sep 30 11:35:58 crc kubenswrapper[4730]: I0930 11:35:58.785332 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-6kb84/crc-debug-j44np" Sep 30 11:35:58 crc kubenswrapper[4730]: I0930 11:35:58.841760 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-6kb84/crc-debug-j44np"] Sep 30 11:35:58 crc kubenswrapper[4730]: I0930 11:35:58.851925 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-6kb84/crc-debug-j44np"] Sep 30 11:35:58 crc kubenswrapper[4730]: I0930 11:35:58.864166 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qhrkj\" (UniqueName: \"kubernetes.io/projected/30d121c5-00a1-4dc2-997f-18abf73f5ab5-kube-api-access-qhrkj\") pod \"30d121c5-00a1-4dc2-997f-18abf73f5ab5\" (UID: \"30d121c5-00a1-4dc2-997f-18abf73f5ab5\") " Sep 30 11:35:58 crc kubenswrapper[4730]: I0930 11:35:58.864366 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/30d121c5-00a1-4dc2-997f-18abf73f5ab5-host\") pod \"30d121c5-00a1-4dc2-997f-18abf73f5ab5\" (UID: \"30d121c5-00a1-4dc2-997f-18abf73f5ab5\") " Sep 30 11:35:58 crc kubenswrapper[4730]: I0930 11:35:58.864562 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/30d121c5-00a1-4dc2-997f-18abf73f5ab5-host" (OuterVolumeSpecName: "host") pod "30d121c5-00a1-4dc2-997f-18abf73f5ab5" (UID: "30d121c5-00a1-4dc2-997f-18abf73f5ab5"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 11:35:58 crc kubenswrapper[4730]: I0930 11:35:58.864830 4730 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/30d121c5-00a1-4dc2-997f-18abf73f5ab5-host\") on node \"crc\" DevicePath \"\"" Sep 30 11:35:58 crc kubenswrapper[4730]: I0930 11:35:58.869366 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/30d121c5-00a1-4dc2-997f-18abf73f5ab5-kube-api-access-qhrkj" (OuterVolumeSpecName: "kube-api-access-qhrkj") pod "30d121c5-00a1-4dc2-997f-18abf73f5ab5" (UID: "30d121c5-00a1-4dc2-997f-18abf73f5ab5"). InnerVolumeSpecName "kube-api-access-qhrkj". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 11:35:58 crc kubenswrapper[4730]: I0930 11:35:58.966495 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qhrkj\" (UniqueName: \"kubernetes.io/projected/30d121c5-00a1-4dc2-997f-18abf73f5ab5-kube-api-access-qhrkj\") on node \"crc\" DevicePath \"\"" Sep 30 11:35:59 crc kubenswrapper[4730]: I0930 11:35:59.709630 4730 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e9d9b5bd2427caa34b5c35a9799d4b56113de9a9c8483eba2737b0cfe9665f83" Sep 30 11:35:59 crc kubenswrapper[4730]: I0930 11:35:59.709729 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-6kb84/crc-debug-j44np" Sep 30 11:36:00 crc kubenswrapper[4730]: I0930 11:36:00.038432 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-6kb84/crc-debug-fnts9"] Sep 30 11:36:00 crc kubenswrapper[4730]: E0930 11:36:00.039081 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="30d121c5-00a1-4dc2-997f-18abf73f5ab5" containerName="container-00" Sep 30 11:36:00 crc kubenswrapper[4730]: I0930 11:36:00.039104 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="30d121c5-00a1-4dc2-997f-18abf73f5ab5" containerName="container-00" Sep 30 11:36:00 crc kubenswrapper[4730]: I0930 11:36:00.039743 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="30d121c5-00a1-4dc2-997f-18abf73f5ab5" containerName="container-00" Sep 30 11:36:00 crc kubenswrapper[4730]: I0930 11:36:00.040966 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-6kb84/crc-debug-fnts9" Sep 30 11:36:00 crc kubenswrapper[4730]: I0930 11:36:00.089951 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/d67b5bd3-c49e-4eca-aadf-02b508cd128a-host\") pod \"crc-debug-fnts9\" (UID: \"d67b5bd3-c49e-4eca-aadf-02b508cd128a\") " pod="openshift-must-gather-6kb84/crc-debug-fnts9" Sep 30 11:36:00 crc kubenswrapper[4730]: I0930 11:36:00.090060 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-stwzf\" (UniqueName: \"kubernetes.io/projected/d67b5bd3-c49e-4eca-aadf-02b508cd128a-kube-api-access-stwzf\") pod \"crc-debug-fnts9\" (UID: \"d67b5bd3-c49e-4eca-aadf-02b508cd128a\") " pod="openshift-must-gather-6kb84/crc-debug-fnts9" Sep 30 11:36:00 crc kubenswrapper[4730]: I0930 11:36:00.193402 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/d67b5bd3-c49e-4eca-aadf-02b508cd128a-host\") pod \"crc-debug-fnts9\" (UID: \"d67b5bd3-c49e-4eca-aadf-02b508cd128a\") " pod="openshift-must-gather-6kb84/crc-debug-fnts9" Sep 30 11:36:00 crc kubenswrapper[4730]: I0930 11:36:00.193564 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-stwzf\" (UniqueName: \"kubernetes.io/projected/d67b5bd3-c49e-4eca-aadf-02b508cd128a-kube-api-access-stwzf\") pod \"crc-debug-fnts9\" (UID: \"d67b5bd3-c49e-4eca-aadf-02b508cd128a\") " pod="openshift-must-gather-6kb84/crc-debug-fnts9" Sep 30 11:36:00 crc kubenswrapper[4730]: I0930 11:36:00.193694 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/d67b5bd3-c49e-4eca-aadf-02b508cd128a-host\") pod \"crc-debug-fnts9\" (UID: \"d67b5bd3-c49e-4eca-aadf-02b508cd128a\") " pod="openshift-must-gather-6kb84/crc-debug-fnts9" Sep 30 11:36:00 crc kubenswrapper[4730]: I0930 11:36:00.226379 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-stwzf\" (UniqueName: \"kubernetes.io/projected/d67b5bd3-c49e-4eca-aadf-02b508cd128a-kube-api-access-stwzf\") pod \"crc-debug-fnts9\" (UID: \"d67b5bd3-c49e-4eca-aadf-02b508cd128a\") " pod="openshift-must-gather-6kb84/crc-debug-fnts9" Sep 30 11:36:00 crc kubenswrapper[4730]: I0930 11:36:00.372197 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-6kb84/crc-debug-fnts9" Sep 30 11:36:00 crc kubenswrapper[4730]: I0930 11:36:00.399094 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="30d121c5-00a1-4dc2-997f-18abf73f5ab5" path="/var/lib/kubelet/pods/30d121c5-00a1-4dc2-997f-18abf73f5ab5/volumes" Sep 30 11:36:00 crc kubenswrapper[4730]: I0930 11:36:00.720989 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-6kb84/crc-debug-fnts9" event={"ID":"d67b5bd3-c49e-4eca-aadf-02b508cd128a","Type":"ContainerStarted","Data":"4ed224529d8c8f99278b936c898c2f58cebcf6a4625b21bd63cb846bae788fed"} Sep 30 11:36:01 crc kubenswrapper[4730]: I0930 11:36:01.733727 4730 generic.go:334] "Generic (PLEG): container finished" podID="d67b5bd3-c49e-4eca-aadf-02b508cd128a" containerID="8b7bbf270c5a949769bb51ae97928c3acadfb2586931215ce11494076af40a3c" exitCode=0 Sep 30 11:36:01 crc kubenswrapper[4730]: I0930 11:36:01.733798 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-6kb84/crc-debug-fnts9" event={"ID":"d67b5bd3-c49e-4eca-aadf-02b508cd128a","Type":"ContainerDied","Data":"8b7bbf270c5a949769bb51ae97928c3acadfb2586931215ce11494076af40a3c"} Sep 30 11:36:02 crc kubenswrapper[4730]: I0930 11:36:02.380699 4730 scope.go:117] "RemoveContainer" containerID="1ca4e75765a6b0ce8ace2aa5038325030430c2be92cc16d25c232bed60293e3b" Sep 30 11:36:02 crc kubenswrapper[4730]: E0930 11:36:02.381528 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 11:36:02 crc kubenswrapper[4730]: I0930 11:36:02.855402 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-6kb84/crc-debug-fnts9" Sep 30 11:36:02 crc kubenswrapper[4730]: I0930 11:36:02.951554 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-stwzf\" (UniqueName: \"kubernetes.io/projected/d67b5bd3-c49e-4eca-aadf-02b508cd128a-kube-api-access-stwzf\") pod \"d67b5bd3-c49e-4eca-aadf-02b508cd128a\" (UID: \"d67b5bd3-c49e-4eca-aadf-02b508cd128a\") " Sep 30 11:36:02 crc kubenswrapper[4730]: I0930 11:36:02.951643 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/d67b5bd3-c49e-4eca-aadf-02b508cd128a-host\") pod \"d67b5bd3-c49e-4eca-aadf-02b508cd128a\" (UID: \"d67b5bd3-c49e-4eca-aadf-02b508cd128a\") " Sep 30 11:36:02 crc kubenswrapper[4730]: I0930 11:36:02.952285 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d67b5bd3-c49e-4eca-aadf-02b508cd128a-host" (OuterVolumeSpecName: "host") pod "d67b5bd3-c49e-4eca-aadf-02b508cd128a" (UID: "d67b5bd3-c49e-4eca-aadf-02b508cd128a"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 11:36:02 crc kubenswrapper[4730]: I0930 11:36:02.957082 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d67b5bd3-c49e-4eca-aadf-02b508cd128a-kube-api-access-stwzf" (OuterVolumeSpecName: "kube-api-access-stwzf") pod "d67b5bd3-c49e-4eca-aadf-02b508cd128a" (UID: "d67b5bd3-c49e-4eca-aadf-02b508cd128a"). InnerVolumeSpecName "kube-api-access-stwzf". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 11:36:03 crc kubenswrapper[4730]: I0930 11:36:03.053579 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-stwzf\" (UniqueName: \"kubernetes.io/projected/d67b5bd3-c49e-4eca-aadf-02b508cd128a-kube-api-access-stwzf\") on node \"crc\" DevicePath \"\"" Sep 30 11:36:03 crc kubenswrapper[4730]: I0930 11:36:03.053883 4730 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/d67b5bd3-c49e-4eca-aadf-02b508cd128a-host\") on node \"crc\" DevicePath \"\"" Sep 30 11:36:03 crc kubenswrapper[4730]: I0930 11:36:03.758728 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-6kb84/crc-debug-fnts9" event={"ID":"d67b5bd3-c49e-4eca-aadf-02b508cd128a","Type":"ContainerDied","Data":"4ed224529d8c8f99278b936c898c2f58cebcf6a4625b21bd63cb846bae788fed"} Sep 30 11:36:03 crc kubenswrapper[4730]: I0930 11:36:03.758779 4730 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4ed224529d8c8f99278b936c898c2f58cebcf6a4625b21bd63cb846bae788fed" Sep 30 11:36:03 crc kubenswrapper[4730]: I0930 11:36:03.758776 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-6kb84/crc-debug-fnts9" Sep 30 11:36:11 crc kubenswrapper[4730]: I0930 11:36:11.348229 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-6kb84/crc-debug-fnts9"] Sep 30 11:36:11 crc kubenswrapper[4730]: I0930 11:36:11.356846 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-6kb84/crc-debug-fnts9"] Sep 30 11:36:12 crc kubenswrapper[4730]: I0930 11:36:12.395107 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d67b5bd3-c49e-4eca-aadf-02b508cd128a" path="/var/lib/kubelet/pods/d67b5bd3-c49e-4eca-aadf-02b508cd128a/volumes" Sep 30 11:36:12 crc kubenswrapper[4730]: I0930 11:36:12.515210 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-6kb84/crc-debug-xcm5k"] Sep 30 11:36:12 crc kubenswrapper[4730]: E0930 11:36:12.515656 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d67b5bd3-c49e-4eca-aadf-02b508cd128a" containerName="container-00" Sep 30 11:36:12 crc kubenswrapper[4730]: I0930 11:36:12.515675 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="d67b5bd3-c49e-4eca-aadf-02b508cd128a" containerName="container-00" Sep 30 11:36:12 crc kubenswrapper[4730]: I0930 11:36:12.515865 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="d67b5bd3-c49e-4eca-aadf-02b508cd128a" containerName="container-00" Sep 30 11:36:12 crc kubenswrapper[4730]: I0930 11:36:12.516559 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-6kb84/crc-debug-xcm5k" Sep 30 11:36:12 crc kubenswrapper[4730]: I0930 11:36:12.610661 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8rdgm\" (UniqueName: \"kubernetes.io/projected/9c6565b9-cd38-4663-9809-2840c0fa3b1e-kube-api-access-8rdgm\") pod \"crc-debug-xcm5k\" (UID: \"9c6565b9-cd38-4663-9809-2840c0fa3b1e\") " pod="openshift-must-gather-6kb84/crc-debug-xcm5k" Sep 30 11:36:12 crc kubenswrapper[4730]: I0930 11:36:12.611007 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/9c6565b9-cd38-4663-9809-2840c0fa3b1e-host\") pod \"crc-debug-xcm5k\" (UID: \"9c6565b9-cd38-4663-9809-2840c0fa3b1e\") " pod="openshift-must-gather-6kb84/crc-debug-xcm5k" Sep 30 11:36:12 crc kubenswrapper[4730]: I0930 11:36:12.713325 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/9c6565b9-cd38-4663-9809-2840c0fa3b1e-host\") pod \"crc-debug-xcm5k\" (UID: \"9c6565b9-cd38-4663-9809-2840c0fa3b1e\") " pod="openshift-must-gather-6kb84/crc-debug-xcm5k" Sep 30 11:36:12 crc kubenswrapper[4730]: I0930 11:36:12.713576 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/9c6565b9-cd38-4663-9809-2840c0fa3b1e-host\") pod \"crc-debug-xcm5k\" (UID: \"9c6565b9-cd38-4663-9809-2840c0fa3b1e\") " pod="openshift-must-gather-6kb84/crc-debug-xcm5k" Sep 30 11:36:12 crc kubenswrapper[4730]: I0930 11:36:12.713668 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8rdgm\" (UniqueName: \"kubernetes.io/projected/9c6565b9-cd38-4663-9809-2840c0fa3b1e-kube-api-access-8rdgm\") pod \"crc-debug-xcm5k\" (UID: \"9c6565b9-cd38-4663-9809-2840c0fa3b1e\") " pod="openshift-must-gather-6kb84/crc-debug-xcm5k" Sep 30 11:36:12 crc kubenswrapper[4730]: I0930 11:36:12.752328 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8rdgm\" (UniqueName: \"kubernetes.io/projected/9c6565b9-cd38-4663-9809-2840c0fa3b1e-kube-api-access-8rdgm\") pod \"crc-debug-xcm5k\" (UID: \"9c6565b9-cd38-4663-9809-2840c0fa3b1e\") " pod="openshift-must-gather-6kb84/crc-debug-xcm5k" Sep 30 11:36:12 crc kubenswrapper[4730]: I0930 11:36:12.837748 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-6kb84/crc-debug-xcm5k" Sep 30 11:36:13 crc kubenswrapper[4730]: I0930 11:36:13.865173 4730 generic.go:334] "Generic (PLEG): container finished" podID="9c6565b9-cd38-4663-9809-2840c0fa3b1e" containerID="b2b37db17c0704209e89d79d860a5080a0d1262e592825edd3e78a261d2c2178" exitCode=0 Sep 30 11:36:13 crc kubenswrapper[4730]: I0930 11:36:13.865284 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-6kb84/crc-debug-xcm5k" event={"ID":"9c6565b9-cd38-4663-9809-2840c0fa3b1e","Type":"ContainerDied","Data":"b2b37db17c0704209e89d79d860a5080a0d1262e592825edd3e78a261d2c2178"} Sep 30 11:36:13 crc kubenswrapper[4730]: I0930 11:36:13.866028 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-6kb84/crc-debug-xcm5k" event={"ID":"9c6565b9-cd38-4663-9809-2840c0fa3b1e","Type":"ContainerStarted","Data":"91849e4d3fb1a5f9f548e292edd70ac251acd1e19a60f1d2e958664089c411e3"} Sep 30 11:36:13 crc kubenswrapper[4730]: I0930 11:36:13.915308 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-6kb84/crc-debug-xcm5k"] Sep 30 11:36:13 crc kubenswrapper[4730]: I0930 11:36:13.922905 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-6kb84/crc-debug-xcm5k"] Sep 30 11:36:14 crc kubenswrapper[4730]: I0930 11:36:14.980031 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-6kb84/crc-debug-xcm5k" Sep 30 11:36:15 crc kubenswrapper[4730]: I0930 11:36:15.163736 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/9c6565b9-cd38-4663-9809-2840c0fa3b1e-host\") pod \"9c6565b9-cd38-4663-9809-2840c0fa3b1e\" (UID: \"9c6565b9-cd38-4663-9809-2840c0fa3b1e\") " Sep 30 11:36:15 crc kubenswrapper[4730]: I0930 11:36:15.164149 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9c6565b9-cd38-4663-9809-2840c0fa3b1e-host" (OuterVolumeSpecName: "host") pod "9c6565b9-cd38-4663-9809-2840c0fa3b1e" (UID: "9c6565b9-cd38-4663-9809-2840c0fa3b1e"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 11:36:15 crc kubenswrapper[4730]: I0930 11:36:15.164152 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8rdgm\" (UniqueName: \"kubernetes.io/projected/9c6565b9-cd38-4663-9809-2840c0fa3b1e-kube-api-access-8rdgm\") pod \"9c6565b9-cd38-4663-9809-2840c0fa3b1e\" (UID: \"9c6565b9-cd38-4663-9809-2840c0fa3b1e\") " Sep 30 11:36:15 crc kubenswrapper[4730]: I0930 11:36:15.171864 4730 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/9c6565b9-cd38-4663-9809-2840c0fa3b1e-host\") on node \"crc\" DevicePath \"\"" Sep 30 11:36:15 crc kubenswrapper[4730]: I0930 11:36:15.192395 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9c6565b9-cd38-4663-9809-2840c0fa3b1e-kube-api-access-8rdgm" (OuterVolumeSpecName: "kube-api-access-8rdgm") pod "9c6565b9-cd38-4663-9809-2840c0fa3b1e" (UID: "9c6565b9-cd38-4663-9809-2840c0fa3b1e"). InnerVolumeSpecName "kube-api-access-8rdgm". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 11:36:15 crc kubenswrapper[4730]: I0930 11:36:15.273651 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8rdgm\" (UniqueName: \"kubernetes.io/projected/9c6565b9-cd38-4663-9809-2840c0fa3b1e-kube-api-access-8rdgm\") on node \"crc\" DevicePath \"\"" Sep 30 11:36:15 crc kubenswrapper[4730]: I0930 11:36:15.381922 4730 scope.go:117] "RemoveContainer" containerID="1ca4e75765a6b0ce8ace2aa5038325030430c2be92cc16d25c232bed60293e3b" Sep 30 11:36:15 crc kubenswrapper[4730]: E0930 11:36:15.382170 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 11:36:15 crc kubenswrapper[4730]: I0930 11:36:15.480786 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-6ff8b75857-nk8jc_978da32e-9bbe-453d-ba3f-32a89f23550e/kube-rbac-proxy/0.log" Sep 30 11:36:15 crc kubenswrapper[4730]: I0930 11:36:15.558244 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-6ff8b75857-nk8jc_978da32e-9bbe-453d-ba3f-32a89f23550e/manager/0.log" Sep 30 11:36:15 crc kubenswrapper[4730]: I0930 11:36:15.656342 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-644bddb6d8-qx68r_04589829-1e63-438e-b6e8-bdaa6f5ebc19/kube-rbac-proxy/0.log" Sep 30 11:36:15 crc kubenswrapper[4730]: I0930 11:36:15.761993 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-644bddb6d8-qx68r_04589829-1e63-438e-b6e8-bdaa6f5ebc19/manager/0.log" Sep 30 11:36:15 crc kubenswrapper[4730]: I0930 11:36:15.886933 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_d6969747c50f23c1f614e69e237b778b2a9a353a3fb947849eaf84792f4r9zp_e7753738-376e-4cbd-ad5b-42b0bc98a1a8/util/0.log" Sep 30 11:36:15 crc kubenswrapper[4730]: I0930 11:36:15.888775 4730 scope.go:117] "RemoveContainer" containerID="b2b37db17c0704209e89d79d860a5080a0d1262e592825edd3e78a261d2c2178" Sep 30 11:36:15 crc kubenswrapper[4730]: I0930 11:36:15.888801 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-6kb84/crc-debug-xcm5k" Sep 30 11:36:16 crc kubenswrapper[4730]: I0930 11:36:16.009822 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_d6969747c50f23c1f614e69e237b778b2a9a353a3fb947849eaf84792f4r9zp_e7753738-376e-4cbd-ad5b-42b0bc98a1a8/util/0.log" Sep 30 11:36:16 crc kubenswrapper[4730]: I0930 11:36:16.023192 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_d6969747c50f23c1f614e69e237b778b2a9a353a3fb947849eaf84792f4r9zp_e7753738-376e-4cbd-ad5b-42b0bc98a1a8/pull/0.log" Sep 30 11:36:16 crc kubenswrapper[4730]: I0930 11:36:16.069815 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_d6969747c50f23c1f614e69e237b778b2a9a353a3fb947849eaf84792f4r9zp_e7753738-376e-4cbd-ad5b-42b0bc98a1a8/pull/0.log" Sep 30 11:36:16 crc kubenswrapper[4730]: I0930 11:36:16.218832 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_d6969747c50f23c1f614e69e237b778b2a9a353a3fb947849eaf84792f4r9zp_e7753738-376e-4cbd-ad5b-42b0bc98a1a8/pull/0.log" Sep 30 11:36:16 crc kubenswrapper[4730]: I0930 11:36:16.224312 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_d6969747c50f23c1f614e69e237b778b2a9a353a3fb947849eaf84792f4r9zp_e7753738-376e-4cbd-ad5b-42b0bc98a1a8/util/0.log" Sep 30 11:36:16 crc kubenswrapper[4730]: I0930 11:36:16.261790 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_d6969747c50f23c1f614e69e237b778b2a9a353a3fb947849eaf84792f4r9zp_e7753738-376e-4cbd-ad5b-42b0bc98a1a8/extract/0.log" Sep 30 11:36:16 crc kubenswrapper[4730]: I0930 11:36:16.435022 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9c6565b9-cd38-4663-9809-2840c0fa3b1e" path="/var/lib/kubelet/pods/9c6565b9-cd38-4663-9809-2840c0fa3b1e/volumes" Sep 30 11:36:16 crc kubenswrapper[4730]: I0930 11:36:16.470193 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-84f4f7b77b-tglx9_2758692b-990d-4330-9765-22614cd379a0/manager/0.log" Sep 30 11:36:16 crc kubenswrapper[4730]: I0930 11:36:16.475066 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-84f4f7b77b-tglx9_2758692b-990d-4330-9765-22614cd379a0/kube-rbac-proxy/0.log" Sep 30 11:36:16 crc kubenswrapper[4730]: I0930 11:36:16.550742 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-84958c4d49-jdfxz_0f748696-3e59-4b53-a5d2-1dce4b0b6a3a/kube-rbac-proxy/0.log" Sep 30 11:36:16 crc kubenswrapper[4730]: I0930 11:36:16.677578 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5d889d78cf-p957g_419a8cdd-e50e-42f8-b913-61214be0a9a5/kube-rbac-proxy/0.log" Sep 30 11:36:16 crc kubenswrapper[4730]: I0930 11:36:16.705073 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-84958c4d49-jdfxz_0f748696-3e59-4b53-a5d2-1dce4b0b6a3a/manager/0.log" Sep 30 11:36:16 crc kubenswrapper[4730]: I0930 11:36:16.748907 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5d889d78cf-p957g_419a8cdd-e50e-42f8-b913-61214be0a9a5/manager/0.log" Sep 30 11:36:16 crc kubenswrapper[4730]: I0930 11:36:16.871822 4730 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-9f4696d94-mwgsf_db4dd5c5-bcc5-4782-acf0-42d686edd287/kube-rbac-proxy/0.log" Sep 30 11:36:16 crc kubenswrapper[4730]: I0930 11:36:16.914534 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-9f4696d94-mwgsf_db4dd5c5-bcc5-4782-acf0-42d686edd287/manager/0.log" Sep 30 11:36:17 crc kubenswrapper[4730]: I0930 11:36:17.035521 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-7d857cc749-rxq94_4201b1a7-e458-49b2-9536-91e6db49ea36/kube-rbac-proxy/0.log" Sep 30 11:36:17 crc kubenswrapper[4730]: I0930 11:36:17.165717 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-7975b88857-72mvr_3cc5b4f8-09e7-44a7-aa40-f173ad8fb157/kube-rbac-proxy/0.log" Sep 30 11:36:17 crc kubenswrapper[4730]: I0930 11:36:17.257416 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-7d857cc749-rxq94_4201b1a7-e458-49b2-9536-91e6db49ea36/manager/0.log" Sep 30 11:36:17 crc kubenswrapper[4730]: I0930 11:36:17.289400 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-7975b88857-72mvr_3cc5b4f8-09e7-44a7-aa40-f173ad8fb157/manager/0.log" Sep 30 11:36:17 crc kubenswrapper[4730]: I0930 11:36:17.386568 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-5bd55b4bff-qsgdr_c24a4e1a-10db-44f2-9de6-16f4081a5609/kube-rbac-proxy/0.log" Sep 30 11:36:17 crc kubenswrapper[4730]: I0930 11:36:17.466846 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-5bd55b4bff-qsgdr_c24a4e1a-10db-44f2-9de6-16f4081a5609/manager/0.log" Sep 30 11:36:17 crc kubenswrapper[4730]: I0930 11:36:17.573225 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-6d68dbc695-bgczv_27424124-82bf-42fa-a77b-fdbd44f5c24b/kube-rbac-proxy/0.log" Sep 30 11:36:17 crc kubenswrapper[4730]: I0930 11:36:17.605830 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-6d68dbc695-bgczv_27424124-82bf-42fa-a77b-fdbd44f5c24b/manager/0.log" Sep 30 11:36:17 crc kubenswrapper[4730]: I0930 11:36:17.646084 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-88c7-pr4r7_81c8b722-d28f-42d4-8bc0-b82b9eb34500/kube-rbac-proxy/0.log" Sep 30 11:36:17 crc kubenswrapper[4730]: I0930 11:36:17.789161 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-88c7-pr4r7_81c8b722-d28f-42d4-8bc0-b82b9eb34500/manager/0.log" Sep 30 11:36:17 crc kubenswrapper[4730]: I0930 11:36:17.852189 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-64d7b59854-6v77r_8c6c5d61-fd4e-4b83-9b60-7681c6fc19f3/kube-rbac-proxy/0.log" Sep 30 11:36:17 crc kubenswrapper[4730]: I0930 11:36:17.883589 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-64d7b59854-6v77r_8c6c5d61-fd4e-4b83-9b60-7681c6fc19f3/manager/0.log" Sep 30 11:36:18 crc kubenswrapper[4730]: I0930 11:36:18.006010 4730 log.go:25] "Finished parsing 
log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-c7c776c96-z5tb9_7ed8122e-042b-4574-9522-99557d55eedc/kube-rbac-proxy/0.log" Sep 30 11:36:18 crc kubenswrapper[4730]: I0930 11:36:18.111335 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-c7c776c96-z5tb9_7ed8122e-042b-4574-9522-99557d55eedc/manager/0.log" Sep 30 11:36:18 crc kubenswrapper[4730]: I0930 11:36:18.215986 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-76fcc6dc7c-g7zc5_0f10a085-8ce3-407b-a2ec-b6fabc38bc9f/manager/0.log" Sep 30 11:36:18 crc kubenswrapper[4730]: I0930 11:36:18.245012 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-76fcc6dc7c-g7zc5_0f10a085-8ce3-407b-a2ec-b6fabc38bc9f/kube-rbac-proxy/0.log" Sep 30 11:36:18 crc kubenswrapper[4730]: I0930 11:36:18.319792 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-6d776955-b5jlg_b6969510-2750-4466-b064-7cb67a4acf7e/kube-rbac-proxy/0.log" Sep 30 11:36:18 crc kubenswrapper[4730]: I0930 11:36:18.413847 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-6d776955-b5jlg_b6969510-2750-4466-b064-7cb67a4acf7e/manager/0.log" Sep 30 11:36:18 crc kubenswrapper[4730]: I0930 11:36:18.560243 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-79b5487686-nzxks_dc289df2-abec-4f24-a873-82523204cb2b/kube-rbac-proxy/0.log" Sep 30 11:36:18 crc kubenswrapper[4730]: I0930 11:36:18.896483 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-f85b56ffc-k4dxb_020b365e-9b85-4464-b95f-47e12c8812c5/kube-rbac-proxy/0.log" Sep 30 11:36:19 crc kubenswrapper[4730]: I0930 11:36:19.036085 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-f85b56ffc-k4dxb_020b365e-9b85-4464-b95f-47e12c8812c5/operator/0.log" Sep 30 11:36:19 crc kubenswrapper[4730]: I0930 11:36:19.068215 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-b5xnk_6c48593a-c4fc-49ae-a91b-4a5f57667d3f/registry-server/0.log" Sep 30 11:36:19 crc kubenswrapper[4730]: I0930 11:36:19.224589 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-9976ff44c-tsgfd_f4fe55ab-8eac-4f9a-9f5f-c70a91fd261e/kube-rbac-proxy/0.log" Sep 30 11:36:19 crc kubenswrapper[4730]: I0930 11:36:19.305987 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-9976ff44c-tsgfd_f4fe55ab-8eac-4f9a-9f5f-c70a91fd261e/manager/0.log" Sep 30 11:36:19 crc kubenswrapper[4730]: I0930 11:36:19.388391 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-589c58c6c-zbmbb_28fcf9ea-7f63-4add-bb31-99af57fcce2c/kube-rbac-proxy/0.log" Sep 30 11:36:19 crc kubenswrapper[4730]: I0930 11:36:19.466317 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-589c58c6c-zbmbb_28fcf9ea-7f63-4add-bb31-99af57fcce2c/manager/0.log" Sep 30 11:36:19 crc kubenswrapper[4730]: I0930 11:36:19.623372 4730 
log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-79d8469568-d4vnn_f26267a9-08cf-4ff8-8fab-d1bfe01dbd65/operator/0.log" Sep 30 11:36:19 crc kubenswrapper[4730]: I0930 11:36:19.750156 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-bc7dc7bd9-dsvl6_a081f8cc-4fb8-457c-84de-2c7ba2c84821/kube-rbac-proxy/0.log" Sep 30 11:36:19 crc kubenswrapper[4730]: I0930 11:36:19.796051 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-bc7dc7bd9-dsvl6_a081f8cc-4fb8-457c-84de-2c7ba2c84821/manager/0.log" Sep 30 11:36:19 crc kubenswrapper[4730]: I0930 11:36:19.867003 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-79b5487686-nzxks_dc289df2-abec-4f24-a873-82523204cb2b/manager/0.log" Sep 30 11:36:19 crc kubenswrapper[4730]: I0930 11:36:19.889996 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-b8d54b5d7-nwvfh_e6074e1b-4192-43a7-b391-f4112d2486bf/kube-rbac-proxy/0.log" Sep 30 11:36:20 crc kubenswrapper[4730]: I0930 11:36:20.074446 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-f66b554c6-djzc5_f3fcce5a-2080-44f6-971c-d1bda3dd0fe0/kube-rbac-proxy/0.log" Sep 30 11:36:20 crc kubenswrapper[4730]: I0930 11:36:20.077321 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-f66b554c6-djzc5_f3fcce5a-2080-44f6-971c-d1bda3dd0fe0/manager/0.log" Sep 30 11:36:20 crc kubenswrapper[4730]: I0930 11:36:20.196675 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-b8d54b5d7-nwvfh_e6074e1b-4192-43a7-b391-f4112d2486bf/manager/0.log" Sep 30 11:36:20 crc kubenswrapper[4730]: I0930 11:36:20.226402 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-6c4b8dd4dc-tbxsc_ba719558-c698-41e9-8b5e-a3449a6f9a7c/kube-rbac-proxy/0.log" Sep 30 11:36:20 crc kubenswrapper[4730]: I0930 11:36:20.348274 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-6c4b8dd4dc-tbxsc_ba719558-c698-41e9-8b5e-a3449a6f9a7c/manager/0.log" Sep 30 11:36:27 crc kubenswrapper[4730]: I0930 11:36:27.381411 4730 scope.go:117] "RemoveContainer" containerID="1ca4e75765a6b0ce8ace2aa5038325030430c2be92cc16d25c232bed60293e3b" Sep 30 11:36:27 crc kubenswrapper[4730]: E0930 11:36:27.381904 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 11:36:36 crc kubenswrapper[4730]: I0930 11:36:36.150198 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-27l2q_eb3285d0-7f84-46d0-9c21-136e077b813a/control-plane-machine-set-operator/0.log" Sep 30 11:36:36 crc kubenswrapper[4730]: I0930 11:36:36.333678 4730 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-r86xc_789ee928-afa8-424d-8810-6a04b2a7d5d6/machine-api-operator/0.log" Sep 30 11:36:36 crc kubenswrapper[4730]: I0930 11:36:36.339220 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-r86xc_789ee928-afa8-424d-8810-6a04b2a7d5d6/kube-rbac-proxy/0.log" Sep 30 11:36:40 crc kubenswrapper[4730]: I0930 11:36:40.381661 4730 scope.go:117] "RemoveContainer" containerID="1ca4e75765a6b0ce8ace2aa5038325030430c2be92cc16d25c232bed60293e3b" Sep 30 11:36:40 crc kubenswrapper[4730]: E0930 11:36:40.382577 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 11:36:49 crc kubenswrapper[4730]: I0930 11:36:49.265944 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-5b446d88c5-k5vht_66889f3c-938c-46e9-a430-801bb731b19e/cert-manager-controller/0.log" Sep 30 11:36:49 crc kubenswrapper[4730]: I0930 11:36:49.435949 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7f985d654d-9lmxl_5461a244-b4c4-48fb-9590-ebc310a13761/cert-manager-cainjector/0.log" Sep 30 11:36:49 crc kubenswrapper[4730]: I0930 11:36:49.492514 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-5655c58dd6-p92bh_687b4645-7901-4987-adde-e3db6b502a52/cert-manager-webhook/0.log" Sep 30 11:36:55 crc kubenswrapper[4730]: I0930 11:36:55.380734 4730 scope.go:117] "RemoveContainer" containerID="1ca4e75765a6b0ce8ace2aa5038325030430c2be92cc16d25c232bed60293e3b" Sep 30 11:36:55 crc kubenswrapper[4730]: E0930 11:36:55.381569 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 11:37:01 crc kubenswrapper[4730]: I0930 11:37:01.353678 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-864bb6dfb5-2tzsp_63a29940-b6e3-47cf-b71b-f010806ae889/nmstate-console-plugin/0.log" Sep 30 11:37:01 crc kubenswrapper[4730]: I0930 11:37:01.533319 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-5j4jt_a4994298-b2c8-4c02-9196-58d0cd805da1/nmstate-handler/0.log" Sep 30 11:37:01 crc kubenswrapper[4730]: I0930 11:37:01.613242 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-58fcddf996-5pc6s_24231826-1571-4b73-ae50-bc95035399b2/nmstate-metrics/0.log" Sep 30 11:37:01 crc kubenswrapper[4730]: I0930 11:37:01.629997 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-58fcddf996-5pc6s_24231826-1571-4b73-ae50-bc95035399b2/kube-rbac-proxy/0.log" Sep 30 11:37:01 crc kubenswrapper[4730]: I0930 11:37:01.788566 4730 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-nmstate_nmstate-operator-5d6f6cfd66-krpvk_4d5a580e-a60b-4854-838b-f51fb9e32536/nmstate-operator/0.log" Sep 30 11:37:01 crc kubenswrapper[4730]: I0930 11:37:01.814012 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-6d689559c5-6znsv_05ca167b-1d36-4bd0-82f0-07b82f5e9a7d/nmstate-webhook/0.log" Sep 30 11:37:06 crc kubenswrapper[4730]: I0930 11:37:06.391171 4730 scope.go:117] "RemoveContainer" containerID="1ca4e75765a6b0ce8ace2aa5038325030430c2be92cc16d25c232bed60293e3b" Sep 30 11:37:06 crc kubenswrapper[4730]: E0930 11:37:06.391790 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 11:37:15 crc kubenswrapper[4730]: I0930 11:37:15.823204 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-5d688f5ffc-hp2xq_b5cc3ceb-ad9c-4b2d-b272-913a1856afcc/kube-rbac-proxy/0.log" Sep 30 11:37:16 crc kubenswrapper[4730]: I0930 11:37:16.024920 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-5d688f5ffc-hp2xq_b5cc3ceb-ad9c-4b2d-b272-913a1856afcc/controller/0.log" Sep 30 11:37:16 crc kubenswrapper[4730]: I0930 11:37:16.031351 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-rwcp7_7718ff95-7f9a-46a9-a0f9-259e60a9f142/cp-frr-files/0.log" Sep 30 11:37:16 crc kubenswrapper[4730]: I0930 11:37:16.180613 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-rwcp7_7718ff95-7f9a-46a9-a0f9-259e60a9f142/cp-metrics/0.log" Sep 30 11:37:16 crc kubenswrapper[4730]: I0930 11:37:16.185991 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-rwcp7_7718ff95-7f9a-46a9-a0f9-259e60a9f142/cp-frr-files/0.log" Sep 30 11:37:16 crc kubenswrapper[4730]: I0930 11:37:16.198927 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-rwcp7_7718ff95-7f9a-46a9-a0f9-259e60a9f142/cp-reloader/0.log" Sep 30 11:37:16 crc kubenswrapper[4730]: I0930 11:37:16.237350 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-rwcp7_7718ff95-7f9a-46a9-a0f9-259e60a9f142/cp-reloader/0.log" Sep 30 11:37:16 crc kubenswrapper[4730]: I0930 11:37:16.452026 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-rwcp7_7718ff95-7f9a-46a9-a0f9-259e60a9f142/cp-reloader/0.log" Sep 30 11:37:16 crc kubenswrapper[4730]: I0930 11:37:16.463437 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-rwcp7_7718ff95-7f9a-46a9-a0f9-259e60a9f142/cp-frr-files/0.log" Sep 30 11:37:16 crc kubenswrapper[4730]: I0930 11:37:16.491539 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-rwcp7_7718ff95-7f9a-46a9-a0f9-259e60a9f142/cp-metrics/0.log" Sep 30 11:37:16 crc kubenswrapper[4730]: I0930 11:37:16.500190 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-rwcp7_7718ff95-7f9a-46a9-a0f9-259e60a9f142/cp-metrics/0.log" Sep 30 11:37:16 crc kubenswrapper[4730]: I0930 11:37:16.714017 4730 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_frr-k8s-rwcp7_7718ff95-7f9a-46a9-a0f9-259e60a9f142/cp-frr-files/0.log" Sep 30 11:37:16 crc kubenswrapper[4730]: I0930 11:37:16.732040 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-rwcp7_7718ff95-7f9a-46a9-a0f9-259e60a9f142/cp-reloader/0.log" Sep 30 11:37:16 crc kubenswrapper[4730]: I0930 11:37:16.733881 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-rwcp7_7718ff95-7f9a-46a9-a0f9-259e60a9f142/cp-metrics/0.log" Sep 30 11:37:16 crc kubenswrapper[4730]: I0930 11:37:16.748485 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-rwcp7_7718ff95-7f9a-46a9-a0f9-259e60a9f142/controller/0.log" Sep 30 11:37:16 crc kubenswrapper[4730]: I0930 11:37:16.916666 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-rwcp7_7718ff95-7f9a-46a9-a0f9-259e60a9f142/frr-metrics/0.log" Sep 30 11:37:16 crc kubenswrapper[4730]: I0930 11:37:16.989836 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-rwcp7_7718ff95-7f9a-46a9-a0f9-259e60a9f142/kube-rbac-proxy/0.log" Sep 30 11:37:16 crc kubenswrapper[4730]: I0930 11:37:16.994116 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-rwcp7_7718ff95-7f9a-46a9-a0f9-259e60a9f142/kube-rbac-proxy-frr/0.log" Sep 30 11:37:17 crc kubenswrapper[4730]: I0930 11:37:17.172008 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-rwcp7_7718ff95-7f9a-46a9-a0f9-259e60a9f142/reloader/0.log" Sep 30 11:37:17 crc kubenswrapper[4730]: I0930 11:37:17.179963 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-5478bdb765-w96cg_c7e58e36-b0e5-4531-9e55-bf09a14d556e/frr-k8s-webhook-server/0.log" Sep 30 11:37:17 crc kubenswrapper[4730]: I0930 11:37:17.438527 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-574c858f4-cvlx7_b7580b03-c29b-4b03-84c7-726fecd55064/manager/0.log" Sep 30 11:37:17 crc kubenswrapper[4730]: I0930 11:37:17.597542 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-666698b878-dkzwg_5c1b3278-f9f8-41c3-a42f-d789aaaba651/webhook-server/0.log" Sep 30 11:37:17 crc kubenswrapper[4730]: I0930 11:37:17.707728 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-bgftk_fac4c47c-e141-4fab-a69b-de3467d806ce/kube-rbac-proxy/0.log" Sep 30 11:37:18 crc kubenswrapper[4730]: I0930 11:37:18.281083 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-bgftk_fac4c47c-e141-4fab-a69b-de3467d806ce/speaker/0.log" Sep 30 11:37:18 crc kubenswrapper[4730]: I0930 11:37:18.708430 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-rwcp7_7718ff95-7f9a-46a9-a0f9-259e60a9f142/frr/0.log" Sep 30 11:37:19 crc kubenswrapper[4730]: I0930 11:37:19.381325 4730 scope.go:117] "RemoveContainer" containerID="1ca4e75765a6b0ce8ace2aa5038325030430c2be92cc16d25c232bed60293e3b" Sep 30 11:37:19 crc kubenswrapper[4730]: E0930 11:37:19.381857 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 11:37:29 crc kubenswrapper[4730]: I0930 11:37:29.852926 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcmkqnf_d8fd2777-1d8a-4947-a1c0-686fdc13c679/util/0.log" Sep 30 11:37:30 crc kubenswrapper[4730]: I0930 11:37:30.006262 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcmkqnf_d8fd2777-1d8a-4947-a1c0-686fdc13c679/util/0.log" Sep 30 11:37:30 crc kubenswrapper[4730]: I0930 11:37:30.014832 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcmkqnf_d8fd2777-1d8a-4947-a1c0-686fdc13c679/pull/0.log" Sep 30 11:37:30 crc kubenswrapper[4730]: I0930 11:37:30.017307 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcmkqnf_d8fd2777-1d8a-4947-a1c0-686fdc13c679/pull/0.log" Sep 30 11:37:30 crc kubenswrapper[4730]: I0930 11:37:30.205270 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcmkqnf_d8fd2777-1d8a-4947-a1c0-686fdc13c679/extract/0.log" Sep 30 11:37:30 crc kubenswrapper[4730]: I0930 11:37:30.284526 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcmkqnf_d8fd2777-1d8a-4947-a1c0-686fdc13c679/util/0.log" Sep 30 11:37:30 crc kubenswrapper[4730]: I0930 11:37:30.331315 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcmkqnf_d8fd2777-1d8a-4947-a1c0-686fdc13c679/pull/0.log" Sep 30 11:37:30 crc kubenswrapper[4730]: I0930 11:37:30.416716 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d6b9pq_76182bda-e874-4b16-9a53-164f47f7ccb5/util/0.log" Sep 30 11:37:30 crc kubenswrapper[4730]: I0930 11:37:30.521738 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d6b9pq_76182bda-e874-4b16-9a53-164f47f7ccb5/pull/0.log" Sep 30 11:37:30 crc kubenswrapper[4730]: I0930 11:37:30.562044 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d6b9pq_76182bda-e874-4b16-9a53-164f47f7ccb5/pull/0.log" Sep 30 11:37:30 crc kubenswrapper[4730]: I0930 11:37:30.562421 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d6b9pq_76182bda-e874-4b16-9a53-164f47f7ccb5/util/0.log" Sep 30 11:37:30 crc kubenswrapper[4730]: I0930 11:37:30.749554 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d6b9pq_76182bda-e874-4b16-9a53-164f47f7ccb5/util/0.log" Sep 30 11:37:30 crc kubenswrapper[4730]: I0930 11:37:30.750340 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d6b9pq_76182bda-e874-4b16-9a53-164f47f7ccb5/pull/0.log" Sep 30 11:37:30 crc 
kubenswrapper[4730]: I0930 11:37:30.755733 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d6b9pq_76182bda-e874-4b16-9a53-164f47f7ccb5/extract/0.log"
Sep 30 11:37:30 crc kubenswrapper[4730]: I0930 11:37:30.918953 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-zdlqc_a621e0c4-5687-4e20-9c31-11b5ee23f644/extract-utilities/0.log"
Sep 30 11:37:31 crc kubenswrapper[4730]: I0930 11:37:31.070604 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-zdlqc_a621e0c4-5687-4e20-9c31-11b5ee23f644/extract-utilities/0.log"
Sep 30 11:37:31 crc kubenswrapper[4730]: I0930 11:37:31.075792 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-zdlqc_a621e0c4-5687-4e20-9c31-11b5ee23f644/extract-content/0.log"
Sep 30 11:37:31 crc kubenswrapper[4730]: I0930 11:37:31.164683 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-zdlqc_a621e0c4-5687-4e20-9c31-11b5ee23f644/extract-content/0.log"
Sep 30 11:37:31 crc kubenswrapper[4730]: I0930 11:37:31.243733 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-zdlqc_a621e0c4-5687-4e20-9c31-11b5ee23f644/extract-utilities/0.log"
Sep 30 11:37:31 crc kubenswrapper[4730]: I0930 11:37:31.270847 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-zdlqc_a621e0c4-5687-4e20-9c31-11b5ee23f644/extract-content/0.log"
Sep 30 11:37:31 crc kubenswrapper[4730]: I0930 11:37:31.381424 4730 scope.go:117] "RemoveContainer" containerID="1ca4e75765a6b0ce8ace2aa5038325030430c2be92cc16d25c232bed60293e3b"
Sep 30 11:37:31 crc kubenswrapper[4730]: E0930 11:37:31.381686 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327"
Sep 30 11:37:31 crc kubenswrapper[4730]: I0930 11:37:31.398321 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-zdlqc_a621e0c4-5687-4e20-9c31-11b5ee23f644/registry-server/0.log"
Sep 30 11:37:31 crc kubenswrapper[4730]: I0930 11:37:31.475181 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-b9wqt_b0b4345a-451e-4895-be55-f2ce12708fa4/extract-utilities/0.log"
Sep 30 11:37:31 crc kubenswrapper[4730]: I0930 11:37:31.598210 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-b9wqt_b0b4345a-451e-4895-be55-f2ce12708fa4/extract-content/0.log"
Sep 30 11:37:31 crc kubenswrapper[4730]: I0930 11:37:31.622500 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-b9wqt_b0b4345a-451e-4895-be55-f2ce12708fa4/extract-content/0.log"
Sep 30 11:37:31 crc kubenswrapper[4730]: I0930 11:37:31.632499 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-b9wqt_b0b4345a-451e-4895-be55-f2ce12708fa4/extract-utilities/0.log"
Sep 30 11:37:31 crc kubenswrapper[4730]: I0930 11:37:31.758629 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-b9wqt_b0b4345a-451e-4895-be55-f2ce12708fa4/extract-utilities/0.log"
Sep 30 11:37:31 crc kubenswrapper[4730]: I0930 11:37:31.774399 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-b9wqt_b0b4345a-451e-4895-be55-f2ce12708fa4/extract-content/0.log"
Sep 30 11:37:31 crc kubenswrapper[4730]: I0930 11:37:31.987123 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96r4bgv_99980a45-f5ca-428d-b285-bc4f72ff8e28/util/0.log"
Sep 30 11:37:32 crc kubenswrapper[4730]: I0930 11:37:32.214439 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96r4bgv_99980a45-f5ca-428d-b285-bc4f72ff8e28/pull/0.log"
Sep 30 11:37:32 crc kubenswrapper[4730]: I0930 11:37:32.265839 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96r4bgv_99980a45-f5ca-428d-b285-bc4f72ff8e28/util/0.log"
Sep 30 11:37:32 crc kubenswrapper[4730]: I0930 11:37:32.274855 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96r4bgv_99980a45-f5ca-428d-b285-bc4f72ff8e28/pull/0.log"
Sep 30 11:37:32 crc kubenswrapper[4730]: I0930 11:37:32.477560 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96r4bgv_99980a45-f5ca-428d-b285-bc4f72ff8e28/pull/0.log"
Sep 30 11:37:32 crc kubenswrapper[4730]: I0930 11:37:32.479810 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96r4bgv_99980a45-f5ca-428d-b285-bc4f72ff8e28/extract/0.log"
Sep 30 11:37:32 crc kubenswrapper[4730]: I0930 11:37:32.500954 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96r4bgv_99980a45-f5ca-428d-b285-bc4f72ff8e28/util/0.log"
Sep 30 11:37:32 crc kubenswrapper[4730]: I0930 11:37:32.739194 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-6gt5z_a28d8376-b2f6-44da-b872-34bd96b74108/marketplace-operator/0.log"
Sep 30 11:37:32 crc kubenswrapper[4730]: I0930 11:37:32.795471 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-b9wqt_b0b4345a-451e-4895-be55-f2ce12708fa4/registry-server/0.log"
Sep 30 11:37:32 crc kubenswrapper[4730]: I0930 11:37:32.924636 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-mdh55_c426809d-752d-4149-8ece-44dceba59124/extract-utilities/0.log"
Sep 30 11:37:33 crc kubenswrapper[4730]: I0930 11:37:33.113178 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-mdh55_c426809d-752d-4149-8ece-44dceba59124/extract-utilities/0.log"
Sep 30 11:37:33 crc kubenswrapper[4730]: I0930 11:37:33.128531 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-mdh55_c426809d-752d-4149-8ece-44dceba59124/extract-content/0.log"
Sep 30 11:37:33 crc kubenswrapper[4730]: I0930 11:37:33.176972 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-mdh55_c426809d-752d-4149-8ece-44dceba59124/extract-content/0.log"
Sep 30 11:37:33 crc kubenswrapper[4730]: I0930 11:37:33.300709 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-mdh55_c426809d-752d-4149-8ece-44dceba59124/extract-content/0.log"
Sep 30 11:37:33 crc kubenswrapper[4730]: I0930 11:37:33.318882 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-mdh55_c426809d-752d-4149-8ece-44dceba59124/extract-utilities/0.log"
Sep 30 11:37:33 crc kubenswrapper[4730]: I0930 11:37:33.471286 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-mdh55_c426809d-752d-4149-8ece-44dceba59124/registry-server/0.log"
Sep 30 11:37:33 crc kubenswrapper[4730]: I0930 11:37:33.479649 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-zxfgw_04ec1190-8a48-4c88-8034-6e43d5d27a59/extract-utilities/0.log"
Sep 30 11:37:33 crc kubenswrapper[4730]: I0930 11:37:33.631164 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-zxfgw_04ec1190-8a48-4c88-8034-6e43d5d27a59/extract-utilities/0.log"
Sep 30 11:37:33 crc kubenswrapper[4730]: I0930 11:37:33.639979 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-zxfgw_04ec1190-8a48-4c88-8034-6e43d5d27a59/extract-content/0.log"
Sep 30 11:37:33 crc kubenswrapper[4730]: I0930 11:37:33.680145 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-zxfgw_04ec1190-8a48-4c88-8034-6e43d5d27a59/extract-content/0.log"
Sep 30 11:37:33 crc kubenswrapper[4730]: I0930 11:37:33.816250 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-zxfgw_04ec1190-8a48-4c88-8034-6e43d5d27a59/extract-content/0.log"
Sep 30 11:37:33 crc kubenswrapper[4730]: I0930 11:37:33.839555 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-zxfgw_04ec1190-8a48-4c88-8034-6e43d5d27a59/extract-utilities/0.log"
Sep 30 11:37:34 crc kubenswrapper[4730]: I0930 11:37:34.289983 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-zxfgw_04ec1190-8a48-4c88-8034-6e43d5d27a59/registry-server/0.log"
Sep 30 11:37:45 crc kubenswrapper[4730]: I0930 11:37:45.382719 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-7c8cf85677-kv7bf_ad98526d-6d03-4564-849c-5ae4d06519e2/prometheus-operator/0.log"
Sep 30 11:37:45 crc kubenswrapper[4730]: I0930 11:37:45.515524 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-6b8dd4c948-b87gk_e1ad7bb1-aa06-43d2-bd4f-1f53a4bc360a/prometheus-operator-admission-webhook/0.log"
Sep 30 11:37:45 crc kubenswrapper[4730]: I0930 11:37:45.562788 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-6b8dd4c948-rbcn4_5ab94ac6-ffcc-42a9-b00e-f3ef3eb5df1d/prometheus-operator-admission-webhook/0.log"
Sep 30 11:37:45 crc kubenswrapper[4730]: I0930 11:37:45.722449 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_observability-operator-cc5f78dfc-b68z8_2b9be7f9-7237-4a96-b0a3-9052ab5b0eea/operator/0.log"
Sep 30 11:37:45 crc kubenswrapper[4730]: I0930 11:37:45.734260 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_perses-operator-54bc95c9fb-5gr7s_1f3b0abd-5e6c-4afe-9122-606234241336/perses-operator/0.log"
Sep 30 11:37:46 crc kubenswrapper[4730]: I0930 11:37:46.393729 4730 scope.go:117] "RemoveContainer" containerID="1ca4e75765a6b0ce8ace2aa5038325030430c2be92cc16d25c232bed60293e3b"
Sep 30 11:37:46 crc kubenswrapper[4730]: E0930 11:37:46.394048 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327"
Sep 30 11:38:01 crc kubenswrapper[4730]: I0930 11:38:01.382362 4730 scope.go:117] "RemoveContainer" containerID="1ca4e75765a6b0ce8ace2aa5038325030430c2be92cc16d25c232bed60293e3b"
Sep 30 11:38:01 crc kubenswrapper[4730]: E0930 11:38:01.383433 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327"
Sep 30 11:38:13 crc kubenswrapper[4730]: I0930 11:38:13.381214 4730 scope.go:117] "RemoveContainer" containerID="1ca4e75765a6b0ce8ace2aa5038325030430c2be92cc16d25c232bed60293e3b"
Sep 30 11:38:13 crc kubenswrapper[4730]: E0930 11:38:13.381938 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327"
Sep 30 11:38:24 crc kubenswrapper[4730]: I0930 11:38:24.385230 4730 scope.go:117] "RemoveContainer" containerID="1ca4e75765a6b0ce8ace2aa5038325030430c2be92cc16d25c232bed60293e3b"
Sep 30 11:38:24 crc kubenswrapper[4730]: E0930 11:38:24.386107 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327"
Sep 30 11:38:38 crc kubenswrapper[4730]: I0930 11:38:38.380984 4730 scope.go:117] "RemoveContainer" containerID="1ca4e75765a6b0ce8ace2aa5038325030430c2be92cc16d25c232bed60293e3b"
Sep 30 11:38:38 crc kubenswrapper[4730]: E0930 11:38:38.381970 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327"
podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 11:38:41 crc kubenswrapper[4730]: I0930 11:38:41.481941 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-n67pn"] Sep 30 11:38:41 crc kubenswrapper[4730]: E0930 11:38:41.482947 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9c6565b9-cd38-4663-9809-2840c0fa3b1e" containerName="container-00" Sep 30 11:38:41 crc kubenswrapper[4730]: I0930 11:38:41.482974 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="9c6565b9-cd38-4663-9809-2840c0fa3b1e" containerName="container-00" Sep 30 11:38:41 crc kubenswrapper[4730]: I0930 11:38:41.483335 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="9c6565b9-cd38-4663-9809-2840c0fa3b1e" containerName="container-00" Sep 30 11:38:41 crc kubenswrapper[4730]: I0930 11:38:41.485806 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-n67pn" Sep 30 11:38:41 crc kubenswrapper[4730]: I0930 11:38:41.506489 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-n67pn"] Sep 30 11:38:41 crc kubenswrapper[4730]: I0930 11:38:41.577994 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cd4dd146-ddba-4589-828a-66412e99105c-catalog-content\") pod \"community-operators-n67pn\" (UID: \"cd4dd146-ddba-4589-828a-66412e99105c\") " pod="openshift-marketplace/community-operators-n67pn" Sep 30 11:38:41 crc kubenswrapper[4730]: I0930 11:38:41.578261 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ghnlv\" (UniqueName: \"kubernetes.io/projected/cd4dd146-ddba-4589-828a-66412e99105c-kube-api-access-ghnlv\") pod \"community-operators-n67pn\" (UID: \"cd4dd146-ddba-4589-828a-66412e99105c\") " pod="openshift-marketplace/community-operators-n67pn" Sep 30 11:38:41 crc kubenswrapper[4730]: I0930 11:38:41.578409 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cd4dd146-ddba-4589-828a-66412e99105c-utilities\") pod \"community-operators-n67pn\" (UID: \"cd4dd146-ddba-4589-828a-66412e99105c\") " pod="openshift-marketplace/community-operators-n67pn" Sep 30 11:38:41 crc kubenswrapper[4730]: I0930 11:38:41.679990 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cd4dd146-ddba-4589-828a-66412e99105c-catalog-content\") pod \"community-operators-n67pn\" (UID: \"cd4dd146-ddba-4589-828a-66412e99105c\") " pod="openshift-marketplace/community-operators-n67pn" Sep 30 11:38:41 crc kubenswrapper[4730]: I0930 11:38:41.680051 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ghnlv\" (UniqueName: \"kubernetes.io/projected/cd4dd146-ddba-4589-828a-66412e99105c-kube-api-access-ghnlv\") pod \"community-operators-n67pn\" (UID: \"cd4dd146-ddba-4589-828a-66412e99105c\") " pod="openshift-marketplace/community-operators-n67pn" Sep 30 11:38:41 crc kubenswrapper[4730]: I0930 11:38:41.680216 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cd4dd146-ddba-4589-828a-66412e99105c-utilities\") pod \"community-operators-n67pn\" (UID: 
\"cd4dd146-ddba-4589-828a-66412e99105c\") " pod="openshift-marketplace/community-operators-n67pn" Sep 30 11:38:41 crc kubenswrapper[4730]: I0930 11:38:41.680770 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cd4dd146-ddba-4589-828a-66412e99105c-utilities\") pod \"community-operators-n67pn\" (UID: \"cd4dd146-ddba-4589-828a-66412e99105c\") " pod="openshift-marketplace/community-operators-n67pn" Sep 30 11:38:41 crc kubenswrapper[4730]: I0930 11:38:41.680941 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cd4dd146-ddba-4589-828a-66412e99105c-catalog-content\") pod \"community-operators-n67pn\" (UID: \"cd4dd146-ddba-4589-828a-66412e99105c\") " pod="openshift-marketplace/community-operators-n67pn" Sep 30 11:38:41 crc kubenswrapper[4730]: I0930 11:38:41.705382 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ghnlv\" (UniqueName: \"kubernetes.io/projected/cd4dd146-ddba-4589-828a-66412e99105c-kube-api-access-ghnlv\") pod \"community-operators-n67pn\" (UID: \"cd4dd146-ddba-4589-828a-66412e99105c\") " pod="openshift-marketplace/community-operators-n67pn" Sep 30 11:38:41 crc kubenswrapper[4730]: I0930 11:38:41.815514 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-n67pn" Sep 30 11:38:42 crc kubenswrapper[4730]: I0930 11:38:42.407198 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-n67pn"] Sep 30 11:38:43 crc kubenswrapper[4730]: I0930 11:38:43.365557 4730 generic.go:334] "Generic (PLEG): container finished" podID="cd4dd146-ddba-4589-828a-66412e99105c" containerID="68c321389bc160641adc9746f909ea81e7d177fc2fea2fa0ac27c5a79be9460f" exitCode=0 Sep 30 11:38:43 crc kubenswrapper[4730]: I0930 11:38:43.365660 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-n67pn" event={"ID":"cd4dd146-ddba-4589-828a-66412e99105c","Type":"ContainerDied","Data":"68c321389bc160641adc9746f909ea81e7d177fc2fea2fa0ac27c5a79be9460f"} Sep 30 11:38:43 crc kubenswrapper[4730]: I0930 11:38:43.367251 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-n67pn" event={"ID":"cd4dd146-ddba-4589-828a-66412e99105c","Type":"ContainerStarted","Data":"805d048d2e9ef39fc498ebe8607ce7b9202cf99e260d66c83d2e3819e615cd2b"} Sep 30 11:38:43 crc kubenswrapper[4730]: I0930 11:38:43.370824 4730 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 30 11:38:45 crc kubenswrapper[4730]: I0930 11:38:45.388088 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-n67pn" event={"ID":"cd4dd146-ddba-4589-828a-66412e99105c","Type":"ContainerStarted","Data":"71f16d10df150d0e68db80ec655c2a4bcbc9c8613d679ff061e3ee14210af7fd"} Sep 30 11:38:46 crc kubenswrapper[4730]: I0930 11:38:46.408670 4730 generic.go:334] "Generic (PLEG): container finished" podID="cd4dd146-ddba-4589-828a-66412e99105c" containerID="71f16d10df150d0e68db80ec655c2a4bcbc9c8613d679ff061e3ee14210af7fd" exitCode=0 Sep 30 11:38:46 crc kubenswrapper[4730]: I0930 11:38:46.409025 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-n67pn" 
event={"ID":"cd4dd146-ddba-4589-828a-66412e99105c","Type":"ContainerDied","Data":"71f16d10df150d0e68db80ec655c2a4bcbc9c8613d679ff061e3ee14210af7fd"} Sep 30 11:38:47 crc kubenswrapper[4730]: I0930 11:38:47.420539 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-n67pn" event={"ID":"cd4dd146-ddba-4589-828a-66412e99105c","Type":"ContainerStarted","Data":"cbdd9897b40a7f4a49454370c6b8642b76a86a3ff8fe59eb59fcdb28abb31b60"} Sep 30 11:38:47 crc kubenswrapper[4730]: I0930 11:38:47.454603 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-n67pn" podStartSLOduration=3.01533091 podStartE2EDuration="6.454576453s" podCreationTimestamp="2025-09-30 11:38:41 +0000 UTC" firstStartedPulling="2025-09-30 11:38:43.370054248 +0000 UTC m=+6567.703314281" lastFinishedPulling="2025-09-30 11:38:46.809299811 +0000 UTC m=+6571.142559824" observedRunningTime="2025-09-30 11:38:47.448674547 +0000 UTC m=+6571.781934570" watchObservedRunningTime="2025-09-30 11:38:47.454576453 +0000 UTC m=+6571.787836466" Sep 30 11:38:49 crc kubenswrapper[4730]: I0930 11:38:49.380810 4730 scope.go:117] "RemoveContainer" containerID="1ca4e75765a6b0ce8ace2aa5038325030430c2be92cc16d25c232bed60293e3b" Sep 30 11:38:49 crc kubenswrapper[4730]: E0930 11:38:49.381519 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 11:38:51 crc kubenswrapper[4730]: I0930 11:38:51.816605 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-n67pn" Sep 30 11:38:51 crc kubenswrapper[4730]: I0930 11:38:51.817345 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-n67pn" Sep 30 11:38:51 crc kubenswrapper[4730]: I0930 11:38:51.913208 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-n67pn" Sep 30 11:38:52 crc kubenswrapper[4730]: I0930 11:38:52.547416 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-n67pn" Sep 30 11:38:52 crc kubenswrapper[4730]: I0930 11:38:52.603919 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-n67pn"] Sep 30 11:38:54 crc kubenswrapper[4730]: I0930 11:38:54.506509 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-n67pn" podUID="cd4dd146-ddba-4589-828a-66412e99105c" containerName="registry-server" containerID="cri-o://cbdd9897b40a7f4a49454370c6b8642b76a86a3ff8fe59eb59fcdb28abb31b60" gracePeriod=2 Sep 30 11:38:55 crc kubenswrapper[4730]: I0930 11:38:55.027191 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-n67pn" Sep 30 11:38:55 crc kubenswrapper[4730]: I0930 11:38:55.090177 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cd4dd146-ddba-4589-828a-66412e99105c-utilities\") pod \"cd4dd146-ddba-4589-828a-66412e99105c\" (UID: \"cd4dd146-ddba-4589-828a-66412e99105c\") " Sep 30 11:38:55 crc kubenswrapper[4730]: I0930 11:38:55.090696 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cd4dd146-ddba-4589-828a-66412e99105c-catalog-content\") pod \"cd4dd146-ddba-4589-828a-66412e99105c\" (UID: \"cd4dd146-ddba-4589-828a-66412e99105c\") " Sep 30 11:38:55 crc kubenswrapper[4730]: I0930 11:38:55.090749 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ghnlv\" (UniqueName: \"kubernetes.io/projected/cd4dd146-ddba-4589-828a-66412e99105c-kube-api-access-ghnlv\") pod \"cd4dd146-ddba-4589-828a-66412e99105c\" (UID: \"cd4dd146-ddba-4589-828a-66412e99105c\") " Sep 30 11:38:55 crc kubenswrapper[4730]: I0930 11:38:55.093379 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cd4dd146-ddba-4589-828a-66412e99105c-utilities" (OuterVolumeSpecName: "utilities") pod "cd4dd146-ddba-4589-828a-66412e99105c" (UID: "cd4dd146-ddba-4589-828a-66412e99105c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 11:38:55 crc kubenswrapper[4730]: I0930 11:38:55.097494 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd4dd146-ddba-4589-828a-66412e99105c-kube-api-access-ghnlv" (OuterVolumeSpecName: "kube-api-access-ghnlv") pod "cd4dd146-ddba-4589-828a-66412e99105c" (UID: "cd4dd146-ddba-4589-828a-66412e99105c"). InnerVolumeSpecName "kube-api-access-ghnlv". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 11:38:55 crc kubenswrapper[4730]: I0930 11:38:55.193176 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ghnlv\" (UniqueName: \"kubernetes.io/projected/cd4dd146-ddba-4589-828a-66412e99105c-kube-api-access-ghnlv\") on node \"crc\" DevicePath \"\"" Sep 30 11:38:55 crc kubenswrapper[4730]: I0930 11:38:55.193207 4730 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cd4dd146-ddba-4589-828a-66412e99105c-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 11:38:55 crc kubenswrapper[4730]: I0930 11:38:55.244267 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cd4dd146-ddba-4589-828a-66412e99105c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "cd4dd146-ddba-4589-828a-66412e99105c" (UID: "cd4dd146-ddba-4589-828a-66412e99105c"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 11:38:55 crc kubenswrapper[4730]: I0930 11:38:55.294785 4730 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cd4dd146-ddba-4589-828a-66412e99105c-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 11:38:55 crc kubenswrapper[4730]: I0930 11:38:55.521983 4730 generic.go:334] "Generic (PLEG): container finished" podID="cd4dd146-ddba-4589-828a-66412e99105c" containerID="cbdd9897b40a7f4a49454370c6b8642b76a86a3ff8fe59eb59fcdb28abb31b60" exitCode=0 Sep 30 11:38:55 crc kubenswrapper[4730]: I0930 11:38:55.522304 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-n67pn" event={"ID":"cd4dd146-ddba-4589-828a-66412e99105c","Type":"ContainerDied","Data":"cbdd9897b40a7f4a49454370c6b8642b76a86a3ff8fe59eb59fcdb28abb31b60"} Sep 30 11:38:55 crc kubenswrapper[4730]: I0930 11:38:55.522334 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-n67pn" event={"ID":"cd4dd146-ddba-4589-828a-66412e99105c","Type":"ContainerDied","Data":"805d048d2e9ef39fc498ebe8607ce7b9202cf99e260d66c83d2e3819e615cd2b"} Sep 30 11:38:55 crc kubenswrapper[4730]: I0930 11:38:55.522358 4730 scope.go:117] "RemoveContainer" containerID="cbdd9897b40a7f4a49454370c6b8642b76a86a3ff8fe59eb59fcdb28abb31b60" Sep 30 11:38:55 crc kubenswrapper[4730]: I0930 11:38:55.522501 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-n67pn" Sep 30 11:38:55 crc kubenswrapper[4730]: I0930 11:38:55.563414 4730 scope.go:117] "RemoveContainer" containerID="71f16d10df150d0e68db80ec655c2a4bcbc9c8613d679ff061e3ee14210af7fd" Sep 30 11:38:55 crc kubenswrapper[4730]: I0930 11:38:55.576993 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-n67pn"] Sep 30 11:38:55 crc kubenswrapper[4730]: I0930 11:38:55.587296 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-n67pn"] Sep 30 11:38:55 crc kubenswrapper[4730]: I0930 11:38:55.594143 4730 scope.go:117] "RemoveContainer" containerID="68c321389bc160641adc9746f909ea81e7d177fc2fea2fa0ac27c5a79be9460f" Sep 30 11:38:55 crc kubenswrapper[4730]: I0930 11:38:55.649264 4730 scope.go:117] "RemoveContainer" containerID="cbdd9897b40a7f4a49454370c6b8642b76a86a3ff8fe59eb59fcdb28abb31b60" Sep 30 11:38:55 crc kubenswrapper[4730]: E0930 11:38:55.650062 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cbdd9897b40a7f4a49454370c6b8642b76a86a3ff8fe59eb59fcdb28abb31b60\": container with ID starting with cbdd9897b40a7f4a49454370c6b8642b76a86a3ff8fe59eb59fcdb28abb31b60 not found: ID does not exist" containerID="cbdd9897b40a7f4a49454370c6b8642b76a86a3ff8fe59eb59fcdb28abb31b60" Sep 30 11:38:55 crc kubenswrapper[4730]: I0930 11:38:55.650108 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cbdd9897b40a7f4a49454370c6b8642b76a86a3ff8fe59eb59fcdb28abb31b60"} err="failed to get container status \"cbdd9897b40a7f4a49454370c6b8642b76a86a3ff8fe59eb59fcdb28abb31b60\": rpc error: code = NotFound desc = could not find container \"cbdd9897b40a7f4a49454370c6b8642b76a86a3ff8fe59eb59fcdb28abb31b60\": container with ID starting with cbdd9897b40a7f4a49454370c6b8642b76a86a3ff8fe59eb59fcdb28abb31b60 not found: ID does not exist" Sep 30 
11:38:55 crc kubenswrapper[4730]: I0930 11:38:55.650137 4730 scope.go:117] "RemoveContainer" containerID="71f16d10df150d0e68db80ec655c2a4bcbc9c8613d679ff061e3ee14210af7fd" Sep 30 11:38:55 crc kubenswrapper[4730]: E0930 11:38:55.650649 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"71f16d10df150d0e68db80ec655c2a4bcbc9c8613d679ff061e3ee14210af7fd\": container with ID starting with 71f16d10df150d0e68db80ec655c2a4bcbc9c8613d679ff061e3ee14210af7fd not found: ID does not exist" containerID="71f16d10df150d0e68db80ec655c2a4bcbc9c8613d679ff061e3ee14210af7fd" Sep 30 11:38:55 crc kubenswrapper[4730]: I0930 11:38:55.650701 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"71f16d10df150d0e68db80ec655c2a4bcbc9c8613d679ff061e3ee14210af7fd"} err="failed to get container status \"71f16d10df150d0e68db80ec655c2a4bcbc9c8613d679ff061e3ee14210af7fd\": rpc error: code = NotFound desc = could not find container \"71f16d10df150d0e68db80ec655c2a4bcbc9c8613d679ff061e3ee14210af7fd\": container with ID starting with 71f16d10df150d0e68db80ec655c2a4bcbc9c8613d679ff061e3ee14210af7fd not found: ID does not exist" Sep 30 11:38:55 crc kubenswrapper[4730]: I0930 11:38:55.650728 4730 scope.go:117] "RemoveContainer" containerID="68c321389bc160641adc9746f909ea81e7d177fc2fea2fa0ac27c5a79be9460f" Sep 30 11:38:55 crc kubenswrapper[4730]: E0930 11:38:55.651079 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"68c321389bc160641adc9746f909ea81e7d177fc2fea2fa0ac27c5a79be9460f\": container with ID starting with 68c321389bc160641adc9746f909ea81e7d177fc2fea2fa0ac27c5a79be9460f not found: ID does not exist" containerID="68c321389bc160641adc9746f909ea81e7d177fc2fea2fa0ac27c5a79be9460f" Sep 30 11:38:55 crc kubenswrapper[4730]: I0930 11:38:55.651129 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"68c321389bc160641adc9746f909ea81e7d177fc2fea2fa0ac27c5a79be9460f"} err="failed to get container status \"68c321389bc160641adc9746f909ea81e7d177fc2fea2fa0ac27c5a79be9460f\": rpc error: code = NotFound desc = could not find container \"68c321389bc160641adc9746f909ea81e7d177fc2fea2fa0ac27c5a79be9460f\": container with ID starting with 68c321389bc160641adc9746f909ea81e7d177fc2fea2fa0ac27c5a79be9460f not found: ID does not exist" Sep 30 11:38:56 crc kubenswrapper[4730]: I0930 11:38:56.394524 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd4dd146-ddba-4589-828a-66412e99105c" path="/var/lib/kubelet/pods/cd4dd146-ddba-4589-828a-66412e99105c/volumes" Sep 30 11:39:01 crc kubenswrapper[4730]: I0930 11:39:01.381722 4730 scope.go:117] "RemoveContainer" containerID="1ca4e75765a6b0ce8ace2aa5038325030430c2be92cc16d25c232bed60293e3b" Sep 30 11:39:01 crc kubenswrapper[4730]: E0930 11:39:01.383113 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 11:39:14 crc kubenswrapper[4730]: I0930 11:39:14.386594 4730 scope.go:117] "RemoveContainer" 
containerID="1ca4e75765a6b0ce8ace2aa5038325030430c2be92cc16d25c232bed60293e3b" Sep 30 11:39:14 crc kubenswrapper[4730]: E0930 11:39:14.388014 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 11:39:27 crc kubenswrapper[4730]: I0930 11:39:27.382087 4730 scope.go:117] "RemoveContainer" containerID="1ca4e75765a6b0ce8ace2aa5038325030430c2be92cc16d25c232bed60293e3b" Sep 30 11:39:27 crc kubenswrapper[4730]: E0930 11:39:27.382802 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 11:39:41 crc kubenswrapper[4730]: I0930 11:39:41.380986 4730 scope.go:117] "RemoveContainer" containerID="1ca4e75765a6b0ce8ace2aa5038325030430c2be92cc16d25c232bed60293e3b" Sep 30 11:39:42 crc kubenswrapper[4730]: I0930 11:39:42.023591 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" event={"ID":"95bd4436-8399-478d-9552-c9ba5ae8f327","Type":"ContainerStarted","Data":"f8da3276b1fecb6794ec8f0d4d10c4ec11116640fed8ffd3e1e680a043c05017"} Sep 30 11:40:12 crc kubenswrapper[4730]: I0930 11:40:12.327295 4730 generic.go:334] "Generic (PLEG): container finished" podID="242561f4-5dfa-49fa-8c0b-4e6db0342923" containerID="384896d208e80bdbbad7fb038bef4d5f8515d13173fedb37b33513bfbd02c07e" exitCode=0 Sep 30 11:40:12 crc kubenswrapper[4730]: I0930 11:40:12.327464 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-6kb84/must-gather-8d298" event={"ID":"242561f4-5dfa-49fa-8c0b-4e6db0342923","Type":"ContainerDied","Data":"384896d208e80bdbbad7fb038bef4d5f8515d13173fedb37b33513bfbd02c07e"} Sep 30 11:40:12 crc kubenswrapper[4730]: I0930 11:40:12.328484 4730 scope.go:117] "RemoveContainer" containerID="384896d208e80bdbbad7fb038bef4d5f8515d13173fedb37b33513bfbd02c07e" Sep 30 11:40:12 crc kubenswrapper[4730]: I0930 11:40:12.781653 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-6kb84_must-gather-8d298_242561f4-5dfa-49fa-8c0b-4e6db0342923/gather/0.log" Sep 30 11:40:21 crc kubenswrapper[4730]: I0930 11:40:21.268019 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-6kb84/must-gather-8d298"] Sep 30 11:40:21 crc kubenswrapper[4730]: I0930 11:40:21.269607 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-6kb84/must-gather-8d298" podUID="242561f4-5dfa-49fa-8c0b-4e6db0342923" containerName="copy" containerID="cri-o://effa99d252a63927551e37d4e045c36c051d3611b2911525b9a684592b0d1200" gracePeriod=2 Sep 30 11:40:21 crc kubenswrapper[4730]: I0930 11:40:21.276403 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-6kb84/must-gather-8d298"] Sep 30 11:40:21 crc kubenswrapper[4730]: I0930 11:40:21.429239 4730 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-must-gather-6kb84_must-gather-8d298_242561f4-5dfa-49fa-8c0b-4e6db0342923/copy/0.log" Sep 30 11:40:21 crc kubenswrapper[4730]: I0930 11:40:21.430229 4730 generic.go:334] "Generic (PLEG): container finished" podID="242561f4-5dfa-49fa-8c0b-4e6db0342923" containerID="effa99d252a63927551e37d4e045c36c051d3611b2911525b9a684592b0d1200" exitCode=143 Sep 30 11:40:21 crc kubenswrapper[4730]: I0930 11:40:21.699149 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-6kb84_must-gather-8d298_242561f4-5dfa-49fa-8c0b-4e6db0342923/copy/0.log" Sep 30 11:40:21 crc kubenswrapper[4730]: I0930 11:40:21.699643 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-6kb84/must-gather-8d298" Sep 30 11:40:21 crc kubenswrapper[4730]: I0930 11:40:21.829381 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4dnxw\" (UniqueName: \"kubernetes.io/projected/242561f4-5dfa-49fa-8c0b-4e6db0342923-kube-api-access-4dnxw\") pod \"242561f4-5dfa-49fa-8c0b-4e6db0342923\" (UID: \"242561f4-5dfa-49fa-8c0b-4e6db0342923\") " Sep 30 11:40:21 crc kubenswrapper[4730]: I0930 11:40:21.829852 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/242561f4-5dfa-49fa-8c0b-4e6db0342923-must-gather-output\") pod \"242561f4-5dfa-49fa-8c0b-4e6db0342923\" (UID: \"242561f4-5dfa-49fa-8c0b-4e6db0342923\") " Sep 30 11:40:21 crc kubenswrapper[4730]: I0930 11:40:21.836296 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/242561f4-5dfa-49fa-8c0b-4e6db0342923-kube-api-access-4dnxw" (OuterVolumeSpecName: "kube-api-access-4dnxw") pod "242561f4-5dfa-49fa-8c0b-4e6db0342923" (UID: "242561f4-5dfa-49fa-8c0b-4e6db0342923"). InnerVolumeSpecName "kube-api-access-4dnxw". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 11:40:21 crc kubenswrapper[4730]: I0930 11:40:21.932652 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4dnxw\" (UniqueName: \"kubernetes.io/projected/242561f4-5dfa-49fa-8c0b-4e6db0342923-kube-api-access-4dnxw\") on node \"crc\" DevicePath \"\"" Sep 30 11:40:22 crc kubenswrapper[4730]: I0930 11:40:22.047074 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/242561f4-5dfa-49fa-8c0b-4e6db0342923-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "242561f4-5dfa-49fa-8c0b-4e6db0342923" (UID: "242561f4-5dfa-49fa-8c0b-4e6db0342923"). InnerVolumeSpecName "must-gather-output". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 11:40:22 crc kubenswrapper[4730]: I0930 11:40:22.137217 4730 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/242561f4-5dfa-49fa-8c0b-4e6db0342923-must-gather-output\") on node \"crc\" DevicePath \"\"" Sep 30 11:40:22 crc kubenswrapper[4730]: I0930 11:40:22.393492 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="242561f4-5dfa-49fa-8c0b-4e6db0342923" path="/var/lib/kubelet/pods/242561f4-5dfa-49fa-8c0b-4e6db0342923/volumes" Sep 30 11:40:22 crc kubenswrapper[4730]: I0930 11:40:22.440054 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-6kb84_must-gather-8d298_242561f4-5dfa-49fa-8c0b-4e6db0342923/copy/0.log" Sep 30 11:40:22 crc kubenswrapper[4730]: I0930 11:40:22.440380 4730 scope.go:117] "RemoveContainer" containerID="effa99d252a63927551e37d4e045c36c051d3611b2911525b9a684592b0d1200" Sep 30 11:40:22 crc kubenswrapper[4730]: I0930 11:40:22.440432 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-6kb84/must-gather-8d298" Sep 30 11:40:22 crc kubenswrapper[4730]: I0930 11:40:22.458688 4730 scope.go:117] "RemoveContainer" containerID="384896d208e80bdbbad7fb038bef4d5f8515d13173fedb37b33513bfbd02c07e" Sep 30 11:40:33 crc kubenswrapper[4730]: I0930 11:40:33.526745 4730 scope.go:117] "RemoveContainer" containerID="0a96abf9fa9016e630601a8bcd7708f411f22d5dedf8350200c67e7ab47af270" Sep 30 11:40:35 crc kubenswrapper[4730]: I0930 11:40:35.376394 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-wczj9"] Sep 30 11:40:35 crc kubenswrapper[4730]: E0930 11:40:35.377112 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cd4dd146-ddba-4589-828a-66412e99105c" containerName="extract-utilities" Sep 30 11:40:35 crc kubenswrapper[4730]: I0930 11:40:35.377126 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="cd4dd146-ddba-4589-828a-66412e99105c" containerName="extract-utilities" Sep 30 11:40:35 crc kubenswrapper[4730]: E0930 11:40:35.377154 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cd4dd146-ddba-4589-828a-66412e99105c" containerName="registry-server" Sep 30 11:40:35 crc kubenswrapper[4730]: I0930 11:40:35.377161 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="cd4dd146-ddba-4589-828a-66412e99105c" containerName="registry-server" Sep 30 11:40:35 crc kubenswrapper[4730]: E0930 11:40:35.377175 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="242561f4-5dfa-49fa-8c0b-4e6db0342923" containerName="copy" Sep 30 11:40:35 crc kubenswrapper[4730]: I0930 11:40:35.377181 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="242561f4-5dfa-49fa-8c0b-4e6db0342923" containerName="copy" Sep 30 11:40:35 crc kubenswrapper[4730]: E0930 11:40:35.377241 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="242561f4-5dfa-49fa-8c0b-4e6db0342923" containerName="gather" Sep 30 11:40:35 crc kubenswrapper[4730]: I0930 11:40:35.377247 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="242561f4-5dfa-49fa-8c0b-4e6db0342923" containerName="gather" Sep 30 11:40:35 crc kubenswrapper[4730]: E0930 11:40:35.377258 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cd4dd146-ddba-4589-828a-66412e99105c" containerName="extract-content" Sep 30 11:40:35 crc kubenswrapper[4730]: I0930 11:40:35.377263 4730 state_mem.go:107] 
"Deleted CPUSet assignment" podUID="cd4dd146-ddba-4589-828a-66412e99105c" containerName="extract-content" Sep 30 11:40:35 crc kubenswrapper[4730]: I0930 11:40:35.377453 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="cd4dd146-ddba-4589-828a-66412e99105c" containerName="registry-server" Sep 30 11:40:35 crc kubenswrapper[4730]: I0930 11:40:35.377472 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="242561f4-5dfa-49fa-8c0b-4e6db0342923" containerName="gather" Sep 30 11:40:35 crc kubenswrapper[4730]: I0930 11:40:35.377483 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="242561f4-5dfa-49fa-8c0b-4e6db0342923" containerName="copy" Sep 30 11:40:35 crc kubenswrapper[4730]: I0930 11:40:35.379003 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wczj9" Sep 30 11:40:35 crc kubenswrapper[4730]: I0930 11:40:35.388037 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-wczj9"] Sep 30 11:40:35 crc kubenswrapper[4730]: I0930 11:40:35.558212 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8ecb1eba-5299-48ef-9ece-03aba68e63fa-catalog-content\") pod \"redhat-marketplace-wczj9\" (UID: \"8ecb1eba-5299-48ef-9ece-03aba68e63fa\") " pod="openshift-marketplace/redhat-marketplace-wczj9" Sep 30 11:40:35 crc kubenswrapper[4730]: I0930 11:40:35.558584 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8ecb1eba-5299-48ef-9ece-03aba68e63fa-utilities\") pod \"redhat-marketplace-wczj9\" (UID: \"8ecb1eba-5299-48ef-9ece-03aba68e63fa\") " pod="openshift-marketplace/redhat-marketplace-wczj9" Sep 30 11:40:35 crc kubenswrapper[4730]: I0930 11:40:35.558816 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lqzr7\" (UniqueName: \"kubernetes.io/projected/8ecb1eba-5299-48ef-9ece-03aba68e63fa-kube-api-access-lqzr7\") pod \"redhat-marketplace-wczj9\" (UID: \"8ecb1eba-5299-48ef-9ece-03aba68e63fa\") " pod="openshift-marketplace/redhat-marketplace-wczj9" Sep 30 11:40:35 crc kubenswrapper[4730]: I0930 11:40:35.662316 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lqzr7\" (UniqueName: \"kubernetes.io/projected/8ecb1eba-5299-48ef-9ece-03aba68e63fa-kube-api-access-lqzr7\") pod \"redhat-marketplace-wczj9\" (UID: \"8ecb1eba-5299-48ef-9ece-03aba68e63fa\") " pod="openshift-marketplace/redhat-marketplace-wczj9" Sep 30 11:40:35 crc kubenswrapper[4730]: I0930 11:40:35.662484 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8ecb1eba-5299-48ef-9ece-03aba68e63fa-catalog-content\") pod \"redhat-marketplace-wczj9\" (UID: \"8ecb1eba-5299-48ef-9ece-03aba68e63fa\") " pod="openshift-marketplace/redhat-marketplace-wczj9" Sep 30 11:40:35 crc kubenswrapper[4730]: I0930 11:40:35.662628 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8ecb1eba-5299-48ef-9ece-03aba68e63fa-utilities\") pod \"redhat-marketplace-wczj9\" (UID: \"8ecb1eba-5299-48ef-9ece-03aba68e63fa\") " pod="openshift-marketplace/redhat-marketplace-wczj9" Sep 30 11:40:35 crc kubenswrapper[4730]: I0930 11:40:35.663899 4730 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8ecb1eba-5299-48ef-9ece-03aba68e63fa-utilities\") pod \"redhat-marketplace-wczj9\" (UID: \"8ecb1eba-5299-48ef-9ece-03aba68e63fa\") " pod="openshift-marketplace/redhat-marketplace-wczj9" Sep 30 11:40:35 crc kubenswrapper[4730]: I0930 11:40:35.664374 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8ecb1eba-5299-48ef-9ece-03aba68e63fa-catalog-content\") pod \"redhat-marketplace-wczj9\" (UID: \"8ecb1eba-5299-48ef-9ece-03aba68e63fa\") " pod="openshift-marketplace/redhat-marketplace-wczj9" Sep 30 11:40:35 crc kubenswrapper[4730]: I0930 11:40:35.695180 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lqzr7\" (UniqueName: \"kubernetes.io/projected/8ecb1eba-5299-48ef-9ece-03aba68e63fa-kube-api-access-lqzr7\") pod \"redhat-marketplace-wczj9\" (UID: \"8ecb1eba-5299-48ef-9ece-03aba68e63fa\") " pod="openshift-marketplace/redhat-marketplace-wczj9" Sep 30 11:40:35 crc kubenswrapper[4730]: I0930 11:40:35.722800 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wczj9" Sep 30 11:40:36 crc kubenswrapper[4730]: I0930 11:40:36.233043 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-wczj9"] Sep 30 11:40:36 crc kubenswrapper[4730]: I0930 11:40:36.588601 4730 generic.go:334] "Generic (PLEG): container finished" podID="8ecb1eba-5299-48ef-9ece-03aba68e63fa" containerID="a32115cfbb01587e25ad75ce980047110c2fc86d9a4950524fd1deb82c918c1d" exitCode=0 Sep 30 11:40:36 crc kubenswrapper[4730]: I0930 11:40:36.588709 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wczj9" event={"ID":"8ecb1eba-5299-48ef-9ece-03aba68e63fa","Type":"ContainerDied","Data":"a32115cfbb01587e25ad75ce980047110c2fc86d9a4950524fd1deb82c918c1d"} Sep 30 11:40:36 crc kubenswrapper[4730]: I0930 11:40:36.588966 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wczj9" event={"ID":"8ecb1eba-5299-48ef-9ece-03aba68e63fa","Type":"ContainerStarted","Data":"412f743759379c7ddbc4a9611b64aa0f1864400ba7f6e583516d856492e234b0"} Sep 30 11:40:37 crc kubenswrapper[4730]: I0930 11:40:37.602552 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wczj9" event={"ID":"8ecb1eba-5299-48ef-9ece-03aba68e63fa","Type":"ContainerStarted","Data":"d90a6c1294bd35f093233298c1cab3a17cd84a701ff804ca7e3a5049f5164623"} Sep 30 11:40:38 crc kubenswrapper[4730]: I0930 11:40:38.613869 4730 generic.go:334] "Generic (PLEG): container finished" podID="8ecb1eba-5299-48ef-9ece-03aba68e63fa" containerID="d90a6c1294bd35f093233298c1cab3a17cd84a701ff804ca7e3a5049f5164623" exitCode=0 Sep 30 11:40:38 crc kubenswrapper[4730]: I0930 11:40:38.613926 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wczj9" event={"ID":"8ecb1eba-5299-48ef-9ece-03aba68e63fa","Type":"ContainerDied","Data":"d90a6c1294bd35f093233298c1cab3a17cd84a701ff804ca7e3a5049f5164623"} Sep 30 11:40:39 crc kubenswrapper[4730]: I0930 11:40:39.625455 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wczj9" 
event={"ID":"8ecb1eba-5299-48ef-9ece-03aba68e63fa","Type":"ContainerStarted","Data":"91f60bccc3652c4969076799817a92648b722253cf0aadab3b8fbfedfd36f721"} Sep 30 11:40:39 crc kubenswrapper[4730]: I0930 11:40:39.644041 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-wczj9" podStartSLOduration=1.942744041 podStartE2EDuration="4.644019464s" podCreationTimestamp="2025-09-30 11:40:35 +0000 UTC" firstStartedPulling="2025-09-30 11:40:36.59036418 +0000 UTC m=+6680.923624163" lastFinishedPulling="2025-09-30 11:40:39.291639593 +0000 UTC m=+6683.624899586" observedRunningTime="2025-09-30 11:40:39.639224688 +0000 UTC m=+6683.972484721" watchObservedRunningTime="2025-09-30 11:40:39.644019464 +0000 UTC m=+6683.977279457" Sep 30 11:40:45 crc kubenswrapper[4730]: I0930 11:40:45.722891 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-wczj9" Sep 30 11:40:45 crc kubenswrapper[4730]: I0930 11:40:45.724578 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-wczj9" Sep 30 11:40:45 crc kubenswrapper[4730]: I0930 11:40:45.775136 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-wczj9" Sep 30 11:40:46 crc kubenswrapper[4730]: I0930 11:40:46.745232 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-wczj9" Sep 30 11:40:46 crc kubenswrapper[4730]: I0930 11:40:46.798020 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-wczj9"] Sep 30 11:40:48 crc kubenswrapper[4730]: I0930 11:40:48.706377 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-wczj9" podUID="8ecb1eba-5299-48ef-9ece-03aba68e63fa" containerName="registry-server" containerID="cri-o://91f60bccc3652c4969076799817a92648b722253cf0aadab3b8fbfedfd36f721" gracePeriod=2 Sep 30 11:40:49 crc kubenswrapper[4730]: I0930 11:40:49.195580 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wczj9" Sep 30 11:40:49 crc kubenswrapper[4730]: I0930 11:40:49.334971 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8ecb1eba-5299-48ef-9ece-03aba68e63fa-catalog-content\") pod \"8ecb1eba-5299-48ef-9ece-03aba68e63fa\" (UID: \"8ecb1eba-5299-48ef-9ece-03aba68e63fa\") " Sep 30 11:40:49 crc kubenswrapper[4730]: I0930 11:40:49.335091 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lqzr7\" (UniqueName: \"kubernetes.io/projected/8ecb1eba-5299-48ef-9ece-03aba68e63fa-kube-api-access-lqzr7\") pod \"8ecb1eba-5299-48ef-9ece-03aba68e63fa\" (UID: \"8ecb1eba-5299-48ef-9ece-03aba68e63fa\") " Sep 30 11:40:49 crc kubenswrapper[4730]: I0930 11:40:49.335224 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8ecb1eba-5299-48ef-9ece-03aba68e63fa-utilities\") pod \"8ecb1eba-5299-48ef-9ece-03aba68e63fa\" (UID: \"8ecb1eba-5299-48ef-9ece-03aba68e63fa\") " Sep 30 11:40:49 crc kubenswrapper[4730]: I0930 11:40:49.336590 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8ecb1eba-5299-48ef-9ece-03aba68e63fa-utilities" (OuterVolumeSpecName: "utilities") pod "8ecb1eba-5299-48ef-9ece-03aba68e63fa" (UID: "8ecb1eba-5299-48ef-9ece-03aba68e63fa"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 11:40:49 crc kubenswrapper[4730]: I0930 11:40:49.341871 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8ecb1eba-5299-48ef-9ece-03aba68e63fa-kube-api-access-lqzr7" (OuterVolumeSpecName: "kube-api-access-lqzr7") pod "8ecb1eba-5299-48ef-9ece-03aba68e63fa" (UID: "8ecb1eba-5299-48ef-9ece-03aba68e63fa"). InnerVolumeSpecName "kube-api-access-lqzr7". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 11:40:49 crc kubenswrapper[4730]: I0930 11:40:49.353535 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8ecb1eba-5299-48ef-9ece-03aba68e63fa-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8ecb1eba-5299-48ef-9ece-03aba68e63fa" (UID: "8ecb1eba-5299-48ef-9ece-03aba68e63fa"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 11:40:49 crc kubenswrapper[4730]: I0930 11:40:49.437411 4730 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8ecb1eba-5299-48ef-9ece-03aba68e63fa-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 11:40:49 crc kubenswrapper[4730]: I0930 11:40:49.437454 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lqzr7\" (UniqueName: \"kubernetes.io/projected/8ecb1eba-5299-48ef-9ece-03aba68e63fa-kube-api-access-lqzr7\") on node \"crc\" DevicePath \"\"" Sep 30 11:40:49 crc kubenswrapper[4730]: I0930 11:40:49.437465 4730 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8ecb1eba-5299-48ef-9ece-03aba68e63fa-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 11:40:49 crc kubenswrapper[4730]: I0930 11:40:49.717544 4730 generic.go:334] "Generic (PLEG): container finished" podID="8ecb1eba-5299-48ef-9ece-03aba68e63fa" containerID="91f60bccc3652c4969076799817a92648b722253cf0aadab3b8fbfedfd36f721" exitCode=0 Sep 30 11:40:49 crc kubenswrapper[4730]: I0930 11:40:49.717594 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wczj9" event={"ID":"8ecb1eba-5299-48ef-9ece-03aba68e63fa","Type":"ContainerDied","Data":"91f60bccc3652c4969076799817a92648b722253cf0aadab3b8fbfedfd36f721"} Sep 30 11:40:49 crc kubenswrapper[4730]: I0930 11:40:49.717728 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wczj9" Sep 30 11:40:49 crc kubenswrapper[4730]: I0930 11:40:49.717769 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wczj9" event={"ID":"8ecb1eba-5299-48ef-9ece-03aba68e63fa","Type":"ContainerDied","Data":"412f743759379c7ddbc4a9611b64aa0f1864400ba7f6e583516d856492e234b0"} Sep 30 11:40:49 crc kubenswrapper[4730]: I0930 11:40:49.717809 4730 scope.go:117] "RemoveContainer" containerID="91f60bccc3652c4969076799817a92648b722253cf0aadab3b8fbfedfd36f721" Sep 30 11:40:49 crc kubenswrapper[4730]: I0930 11:40:49.740322 4730 scope.go:117] "RemoveContainer" containerID="d90a6c1294bd35f093233298c1cab3a17cd84a701ff804ca7e3a5049f5164623" Sep 30 11:40:49 crc kubenswrapper[4730]: I0930 11:40:49.765238 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-wczj9"] Sep 30 11:40:49 crc kubenswrapper[4730]: I0930 11:40:49.775070 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-wczj9"] Sep 30 11:40:49 crc kubenswrapper[4730]: I0930 11:40:49.780475 4730 scope.go:117] "RemoveContainer" containerID="a32115cfbb01587e25ad75ce980047110c2fc86d9a4950524fd1deb82c918c1d" Sep 30 11:40:49 crc kubenswrapper[4730]: I0930 11:40:49.845981 4730 scope.go:117] "RemoveContainer" containerID="91f60bccc3652c4969076799817a92648b722253cf0aadab3b8fbfedfd36f721" Sep 30 11:40:49 crc kubenswrapper[4730]: E0930 11:40:49.846226 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"91f60bccc3652c4969076799817a92648b722253cf0aadab3b8fbfedfd36f721\": container with ID starting with 91f60bccc3652c4969076799817a92648b722253cf0aadab3b8fbfedfd36f721 not found: ID does not exist" containerID="91f60bccc3652c4969076799817a92648b722253cf0aadab3b8fbfedfd36f721" Sep 30 11:40:49 crc kubenswrapper[4730]: I0930 11:40:49.846274 4730 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"91f60bccc3652c4969076799817a92648b722253cf0aadab3b8fbfedfd36f721"} err="failed to get container status \"91f60bccc3652c4969076799817a92648b722253cf0aadab3b8fbfedfd36f721\": rpc error: code = NotFound desc = could not find container \"91f60bccc3652c4969076799817a92648b722253cf0aadab3b8fbfedfd36f721\": container with ID starting with 91f60bccc3652c4969076799817a92648b722253cf0aadab3b8fbfedfd36f721 not found: ID does not exist" Sep 30 11:40:49 crc kubenswrapper[4730]: I0930 11:40:49.846305 4730 scope.go:117] "RemoveContainer" containerID="d90a6c1294bd35f093233298c1cab3a17cd84a701ff804ca7e3a5049f5164623" Sep 30 11:40:49 crc kubenswrapper[4730]: E0930 11:40:49.846520 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d90a6c1294bd35f093233298c1cab3a17cd84a701ff804ca7e3a5049f5164623\": container with ID starting with d90a6c1294bd35f093233298c1cab3a17cd84a701ff804ca7e3a5049f5164623 not found: ID does not exist" containerID="d90a6c1294bd35f093233298c1cab3a17cd84a701ff804ca7e3a5049f5164623" Sep 30 11:40:49 crc kubenswrapper[4730]: I0930 11:40:49.846557 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d90a6c1294bd35f093233298c1cab3a17cd84a701ff804ca7e3a5049f5164623"} err="failed to get container status \"d90a6c1294bd35f093233298c1cab3a17cd84a701ff804ca7e3a5049f5164623\": rpc error: code = NotFound desc = could not find container \"d90a6c1294bd35f093233298c1cab3a17cd84a701ff804ca7e3a5049f5164623\": container with ID starting with d90a6c1294bd35f093233298c1cab3a17cd84a701ff804ca7e3a5049f5164623 not found: ID does not exist" Sep 30 11:40:49 crc kubenswrapper[4730]: I0930 11:40:49.846574 4730 scope.go:117] "RemoveContainer" containerID="a32115cfbb01587e25ad75ce980047110c2fc86d9a4950524fd1deb82c918c1d" Sep 30 11:40:49 crc kubenswrapper[4730]: E0930 11:40:49.846871 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a32115cfbb01587e25ad75ce980047110c2fc86d9a4950524fd1deb82c918c1d\": container with ID starting with a32115cfbb01587e25ad75ce980047110c2fc86d9a4950524fd1deb82c918c1d not found: ID does not exist" containerID="a32115cfbb01587e25ad75ce980047110c2fc86d9a4950524fd1deb82c918c1d" Sep 30 11:40:49 crc kubenswrapper[4730]: I0930 11:40:49.846903 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a32115cfbb01587e25ad75ce980047110c2fc86d9a4950524fd1deb82c918c1d"} err="failed to get container status \"a32115cfbb01587e25ad75ce980047110c2fc86d9a4950524fd1deb82c918c1d\": rpc error: code = NotFound desc = could not find container \"a32115cfbb01587e25ad75ce980047110c2fc86d9a4950524fd1deb82c918c1d\": container with ID starting with a32115cfbb01587e25ad75ce980047110c2fc86d9a4950524fd1deb82c918c1d not found: ID does not exist" Sep 30 11:40:50 crc kubenswrapper[4730]: I0930 11:40:50.390844 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8ecb1eba-5299-48ef-9ece-03aba68e63fa" path="/var/lib/kubelet/pods/8ecb1eba-5299-48ef-9ece-03aba68e63fa/volumes" Sep 30 11:41:01 crc kubenswrapper[4730]: I0930 11:41:01.785922 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-jbr8l/must-gather-k8rvv"] Sep 30 11:41:01 crc kubenswrapper[4730]: E0930 11:41:01.786795 4730 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="8ecb1eba-5299-48ef-9ece-03aba68e63fa" containerName="extract-utilities" Sep 30 11:41:01 crc kubenswrapper[4730]: I0930 11:41:01.786808 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ecb1eba-5299-48ef-9ece-03aba68e63fa" containerName="extract-utilities" Sep 30 11:41:01 crc kubenswrapper[4730]: E0930 11:41:01.786825 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ecb1eba-5299-48ef-9ece-03aba68e63fa" containerName="registry-server" Sep 30 11:41:01 crc kubenswrapper[4730]: I0930 11:41:01.786834 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ecb1eba-5299-48ef-9ece-03aba68e63fa" containerName="registry-server" Sep 30 11:41:01 crc kubenswrapper[4730]: E0930 11:41:01.786849 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ecb1eba-5299-48ef-9ece-03aba68e63fa" containerName="extract-content" Sep 30 11:41:01 crc kubenswrapper[4730]: I0930 11:41:01.786856 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ecb1eba-5299-48ef-9ece-03aba68e63fa" containerName="extract-content" Sep 30 11:41:01 crc kubenswrapper[4730]: I0930 11:41:01.787046 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="8ecb1eba-5299-48ef-9ece-03aba68e63fa" containerName="registry-server" Sep 30 11:41:01 crc kubenswrapper[4730]: I0930 11:41:01.788059 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-jbr8l/must-gather-k8rvv" Sep 30 11:41:01 crc kubenswrapper[4730]: I0930 11:41:01.791281 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-jbr8l"/"openshift-service-ca.crt" Sep 30 11:41:01 crc kubenswrapper[4730]: I0930 11:41:01.793498 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-jbr8l"/"kube-root-ca.crt" Sep 30 11:41:01 crc kubenswrapper[4730]: I0930 11:41:01.801948 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-jbr8l/must-gather-k8rvv"] Sep 30 11:41:01 crc kubenswrapper[4730]: I0930 11:41:01.904971 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/9e4da034-b6ff-407c-a38b-3e9bb41095e5-must-gather-output\") pod \"must-gather-k8rvv\" (UID: \"9e4da034-b6ff-407c-a38b-3e9bb41095e5\") " pod="openshift-must-gather-jbr8l/must-gather-k8rvv" Sep 30 11:41:01 crc kubenswrapper[4730]: I0930 11:41:01.905136 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pbpld\" (UniqueName: \"kubernetes.io/projected/9e4da034-b6ff-407c-a38b-3e9bb41095e5-kube-api-access-pbpld\") pod \"must-gather-k8rvv\" (UID: \"9e4da034-b6ff-407c-a38b-3e9bb41095e5\") " pod="openshift-must-gather-jbr8l/must-gather-k8rvv" Sep 30 11:41:02 crc kubenswrapper[4730]: I0930 11:41:02.007310 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/9e4da034-b6ff-407c-a38b-3e9bb41095e5-must-gather-output\") pod \"must-gather-k8rvv\" (UID: \"9e4da034-b6ff-407c-a38b-3e9bb41095e5\") " pod="openshift-must-gather-jbr8l/must-gather-k8rvv" Sep 30 11:41:02 crc kubenswrapper[4730]: I0930 11:41:02.007460 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pbpld\" (UniqueName: \"kubernetes.io/projected/9e4da034-b6ff-407c-a38b-3e9bb41095e5-kube-api-access-pbpld\") pod \"must-gather-k8rvv\" (UID: 
\"9e4da034-b6ff-407c-a38b-3e9bb41095e5\") " pod="openshift-must-gather-jbr8l/must-gather-k8rvv" Sep 30 11:41:02 crc kubenswrapper[4730]: I0930 11:41:02.007985 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/9e4da034-b6ff-407c-a38b-3e9bb41095e5-must-gather-output\") pod \"must-gather-k8rvv\" (UID: \"9e4da034-b6ff-407c-a38b-3e9bb41095e5\") " pod="openshift-must-gather-jbr8l/must-gather-k8rvv" Sep 30 11:41:02 crc kubenswrapper[4730]: I0930 11:41:02.033415 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pbpld\" (UniqueName: \"kubernetes.io/projected/9e4da034-b6ff-407c-a38b-3e9bb41095e5-kube-api-access-pbpld\") pod \"must-gather-k8rvv\" (UID: \"9e4da034-b6ff-407c-a38b-3e9bb41095e5\") " pod="openshift-must-gather-jbr8l/must-gather-k8rvv" Sep 30 11:41:02 crc kubenswrapper[4730]: I0930 11:41:02.109434 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-jbr8l/must-gather-k8rvv" Sep 30 11:41:02 crc kubenswrapper[4730]: I0930 11:41:02.375698 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-jbr8l/must-gather-k8rvv"] Sep 30 11:41:02 crc kubenswrapper[4730]: I0930 11:41:02.842138 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-jbr8l/must-gather-k8rvv" event={"ID":"9e4da034-b6ff-407c-a38b-3e9bb41095e5","Type":"ContainerStarted","Data":"4aca616e7c530b9bda27d52cb344073ffd89ab92711c37e2d62e32545e969589"} Sep 30 11:41:02 crc kubenswrapper[4730]: I0930 11:41:02.843355 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-jbr8l/must-gather-k8rvv" event={"ID":"9e4da034-b6ff-407c-a38b-3e9bb41095e5","Type":"ContainerStarted","Data":"a5deca41328dad5dd8159cdcc05d6020179cafbb38df3c2ccf31c66c657f762a"} Sep 30 11:41:03 crc kubenswrapper[4730]: I0930 11:41:03.853210 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-jbr8l/must-gather-k8rvv" event={"ID":"9e4da034-b6ff-407c-a38b-3e9bb41095e5","Type":"ContainerStarted","Data":"0a757118e4a12c408d06a5ed7cd1475484e0a20f1f1e944a9d1c20d8d9b7e4c4"} Sep 30 11:41:03 crc kubenswrapper[4730]: I0930 11:41:03.888606 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-jbr8l/must-gather-k8rvv" podStartSLOduration=2.888578416 podStartE2EDuration="2.888578416s" podCreationTimestamp="2025-09-30 11:41:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 11:41:03.875120851 +0000 UTC m=+6708.208380854" watchObservedRunningTime="2025-09-30 11:41:03.888578416 +0000 UTC m=+6708.221838409" Sep 30 11:41:06 crc kubenswrapper[4730]: I0930 11:41:06.743607 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-jbr8l/crc-debug-fhjl7"] Sep 30 11:41:06 crc kubenswrapper[4730]: I0930 11:41:06.745291 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-jbr8l/crc-debug-fhjl7" Sep 30 11:41:06 crc kubenswrapper[4730]: I0930 11:41:06.747402 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-jbr8l"/"default-dockercfg-tbwfg" Sep 30 11:41:06 crc kubenswrapper[4730]: I0930 11:41:06.922737 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/fd263d56-6ba6-4190-a317-f226ec12d3e9-host\") pod \"crc-debug-fhjl7\" (UID: \"fd263d56-6ba6-4190-a317-f226ec12d3e9\") " pod="openshift-must-gather-jbr8l/crc-debug-fhjl7" Sep 30 11:41:06 crc kubenswrapper[4730]: I0930 11:41:06.922799 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lcsfs\" (UniqueName: \"kubernetes.io/projected/fd263d56-6ba6-4190-a317-f226ec12d3e9-kube-api-access-lcsfs\") pod \"crc-debug-fhjl7\" (UID: \"fd263d56-6ba6-4190-a317-f226ec12d3e9\") " pod="openshift-must-gather-jbr8l/crc-debug-fhjl7" Sep 30 11:41:07 crc kubenswrapper[4730]: I0930 11:41:07.024337 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/fd263d56-6ba6-4190-a317-f226ec12d3e9-host\") pod \"crc-debug-fhjl7\" (UID: \"fd263d56-6ba6-4190-a317-f226ec12d3e9\") " pod="openshift-must-gather-jbr8l/crc-debug-fhjl7" Sep 30 11:41:07 crc kubenswrapper[4730]: I0930 11:41:07.024397 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lcsfs\" (UniqueName: \"kubernetes.io/projected/fd263d56-6ba6-4190-a317-f226ec12d3e9-kube-api-access-lcsfs\") pod \"crc-debug-fhjl7\" (UID: \"fd263d56-6ba6-4190-a317-f226ec12d3e9\") " pod="openshift-must-gather-jbr8l/crc-debug-fhjl7" Sep 30 11:41:07 crc kubenswrapper[4730]: I0930 11:41:07.024713 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/fd263d56-6ba6-4190-a317-f226ec12d3e9-host\") pod \"crc-debug-fhjl7\" (UID: \"fd263d56-6ba6-4190-a317-f226ec12d3e9\") " pod="openshift-must-gather-jbr8l/crc-debug-fhjl7" Sep 30 11:41:07 crc kubenswrapper[4730]: I0930 11:41:07.052384 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lcsfs\" (UniqueName: \"kubernetes.io/projected/fd263d56-6ba6-4190-a317-f226ec12d3e9-kube-api-access-lcsfs\") pod \"crc-debug-fhjl7\" (UID: \"fd263d56-6ba6-4190-a317-f226ec12d3e9\") " pod="openshift-must-gather-jbr8l/crc-debug-fhjl7" Sep 30 11:41:07 crc kubenswrapper[4730]: I0930 11:41:07.065649 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-jbr8l/crc-debug-fhjl7" Sep 30 11:41:07 crc kubenswrapper[4730]: I0930 11:41:07.892486 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-jbr8l/crc-debug-fhjl7" event={"ID":"fd263d56-6ba6-4190-a317-f226ec12d3e9","Type":"ContainerStarted","Data":"0d8f899c7563130dc54902c438f300c84b40731e29fd135d19b985f619a43422"} Sep 30 11:41:07 crc kubenswrapper[4730]: I0930 11:41:07.893101 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-jbr8l/crc-debug-fhjl7" event={"ID":"fd263d56-6ba6-4190-a317-f226ec12d3e9","Type":"ContainerStarted","Data":"d4879055bdd64cd9ffd0fa55364ed5e51eefdc544d8d18a18b857f66e915fce2"} Sep 30 11:41:07 crc kubenswrapper[4730]: I0930 11:41:07.906012 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-jbr8l/crc-debug-fhjl7" podStartSLOduration=1.905987477 podStartE2EDuration="1.905987477s" podCreationTimestamp="2025-09-30 11:41:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 11:41:07.903284667 +0000 UTC m=+6712.236544660" watchObservedRunningTime="2025-09-30 11:41:07.905987477 +0000 UTC m=+6712.239247470" Sep 30 11:42:02 crc kubenswrapper[4730]: I0930 11:42:02.336985 4730 patch_prober.go:28] interesting pod/machine-config-daemon-d4zf9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 11:42:02 crc kubenswrapper[4730]: I0930 11:42:02.337347 4730 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 11:42:18 crc kubenswrapper[4730]: I0930 11:42:18.192497 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-6fvh5"] Sep 30 11:42:18 crc kubenswrapper[4730]: I0930 11:42:18.195744 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-6fvh5" Sep 30 11:42:18 crc kubenswrapper[4730]: I0930 11:42:18.210975 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-6fvh5"] Sep 30 11:42:18 crc kubenswrapper[4730]: I0930 11:42:18.262287 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g2265\" (UniqueName: \"kubernetes.io/projected/750bddf6-1369-48f0-b044-0579426ef1b7-kube-api-access-g2265\") pod \"certified-operators-6fvh5\" (UID: \"750bddf6-1369-48f0-b044-0579426ef1b7\") " pod="openshift-marketplace/certified-operators-6fvh5" Sep 30 11:42:18 crc kubenswrapper[4730]: I0930 11:42:18.262358 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/750bddf6-1369-48f0-b044-0579426ef1b7-catalog-content\") pod \"certified-operators-6fvh5\" (UID: \"750bddf6-1369-48f0-b044-0579426ef1b7\") " pod="openshift-marketplace/certified-operators-6fvh5" Sep 30 11:42:18 crc kubenswrapper[4730]: I0930 11:42:18.262952 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/750bddf6-1369-48f0-b044-0579426ef1b7-utilities\") pod \"certified-operators-6fvh5\" (UID: \"750bddf6-1369-48f0-b044-0579426ef1b7\") " pod="openshift-marketplace/certified-operators-6fvh5" Sep 30 11:42:18 crc kubenswrapper[4730]: I0930 11:42:18.365182 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/750bddf6-1369-48f0-b044-0579426ef1b7-catalog-content\") pod \"certified-operators-6fvh5\" (UID: \"750bddf6-1369-48f0-b044-0579426ef1b7\") " pod="openshift-marketplace/certified-operators-6fvh5" Sep 30 11:42:18 crc kubenswrapper[4730]: I0930 11:42:18.365406 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/750bddf6-1369-48f0-b044-0579426ef1b7-utilities\") pod \"certified-operators-6fvh5\" (UID: \"750bddf6-1369-48f0-b044-0579426ef1b7\") " pod="openshift-marketplace/certified-operators-6fvh5" Sep 30 11:42:18 crc kubenswrapper[4730]: I0930 11:42:18.365474 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g2265\" (UniqueName: \"kubernetes.io/projected/750bddf6-1369-48f0-b044-0579426ef1b7-kube-api-access-g2265\") pod \"certified-operators-6fvh5\" (UID: \"750bddf6-1369-48f0-b044-0579426ef1b7\") " pod="openshift-marketplace/certified-operators-6fvh5" Sep 30 11:42:18 crc kubenswrapper[4730]: I0930 11:42:18.366502 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/750bddf6-1369-48f0-b044-0579426ef1b7-catalog-content\") pod \"certified-operators-6fvh5\" (UID: \"750bddf6-1369-48f0-b044-0579426ef1b7\") " pod="openshift-marketplace/certified-operators-6fvh5" Sep 30 11:42:18 crc kubenswrapper[4730]: I0930 11:42:18.366758 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/750bddf6-1369-48f0-b044-0579426ef1b7-utilities\") pod \"certified-operators-6fvh5\" (UID: \"750bddf6-1369-48f0-b044-0579426ef1b7\") " pod="openshift-marketplace/certified-operators-6fvh5" Sep 30 11:42:18 crc kubenswrapper[4730]: I0930 11:42:18.392185 4730 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-g2265\" (UniqueName: \"kubernetes.io/projected/750bddf6-1369-48f0-b044-0579426ef1b7-kube-api-access-g2265\") pod \"certified-operators-6fvh5\" (UID: \"750bddf6-1369-48f0-b044-0579426ef1b7\") " pod="openshift-marketplace/certified-operators-6fvh5" Sep 30 11:42:18 crc kubenswrapper[4730]: I0930 11:42:18.559348 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-6fvh5" Sep 30 11:42:19 crc kubenswrapper[4730]: I0930 11:42:19.136852 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-6fvh5"] Sep 30 11:42:19 crc kubenswrapper[4730]: I0930 11:42:19.538754 4730 generic.go:334] "Generic (PLEG): container finished" podID="750bddf6-1369-48f0-b044-0579426ef1b7" containerID="9629fcb2ba28cc511713f0733c3acd740ea497398d350c548ce036bcdf7511b8" exitCode=0 Sep 30 11:42:19 crc kubenswrapper[4730]: I0930 11:42:19.538803 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6fvh5" event={"ID":"750bddf6-1369-48f0-b044-0579426ef1b7","Type":"ContainerDied","Data":"9629fcb2ba28cc511713f0733c3acd740ea497398d350c548ce036bcdf7511b8"} Sep 30 11:42:19 crc kubenswrapper[4730]: I0930 11:42:19.538831 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6fvh5" event={"ID":"750bddf6-1369-48f0-b044-0579426ef1b7","Type":"ContainerStarted","Data":"e1070649bce1b1e59bac0b6e189368b4094b5617a79f45ead1e06f70ba8ff0c9"} Sep 30 11:42:21 crc kubenswrapper[4730]: I0930 11:42:21.559366 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6fvh5" event={"ID":"750bddf6-1369-48f0-b044-0579426ef1b7","Type":"ContainerStarted","Data":"ac42081f8ca4d495435f5f987111a696ac3f3488df36905e324b3e72d1db159e"} Sep 30 11:42:22 crc kubenswrapper[4730]: I0930 11:42:22.570808 4730 generic.go:334] "Generic (PLEG): container finished" podID="750bddf6-1369-48f0-b044-0579426ef1b7" containerID="ac42081f8ca4d495435f5f987111a696ac3f3488df36905e324b3e72d1db159e" exitCode=0 Sep 30 11:42:22 crc kubenswrapper[4730]: I0930 11:42:22.571000 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6fvh5" event={"ID":"750bddf6-1369-48f0-b044-0579426ef1b7","Type":"ContainerDied","Data":"ac42081f8ca4d495435f5f987111a696ac3f3488df36905e324b3e72d1db159e"} Sep 30 11:42:23 crc kubenswrapper[4730]: I0930 11:42:23.590104 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6fvh5" event={"ID":"750bddf6-1369-48f0-b044-0579426ef1b7","Type":"ContainerStarted","Data":"c8041e0c3b2a48cff95deb0666ffd3924bdc15d8803a0cab0481415b1173b517"} Sep 30 11:42:23 crc kubenswrapper[4730]: I0930 11:42:23.619865 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-6fvh5" podStartSLOduration=2.149241396 podStartE2EDuration="5.619841807s" podCreationTimestamp="2025-09-30 11:42:18 +0000 UTC" firstStartedPulling="2025-09-30 11:42:19.54165107 +0000 UTC m=+6783.874911063" lastFinishedPulling="2025-09-30 11:42:23.012251441 +0000 UTC m=+6787.345511474" observedRunningTime="2025-09-30 11:42:23.613418688 +0000 UTC m=+6787.946678701" watchObservedRunningTime="2025-09-30 11:42:23.619841807 +0000 UTC m=+6787.953101810" Sep 30 11:42:28 crc kubenswrapper[4730]: I0930 11:42:28.560176 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" 
status="unhealthy" pod="openshift-marketplace/certified-operators-6fvh5" Sep 30 11:42:28 crc kubenswrapper[4730]: I0930 11:42:28.560873 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-6fvh5" Sep 30 11:42:28 crc kubenswrapper[4730]: I0930 11:42:28.611680 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-6fvh5" Sep 30 11:42:28 crc kubenswrapper[4730]: I0930 11:42:28.702588 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-6fvh5" Sep 30 11:42:28 crc kubenswrapper[4730]: I0930 11:42:28.860030 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-6fvh5"] Sep 30 11:42:30 crc kubenswrapper[4730]: I0930 11:42:30.675536 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-6fvh5" podUID="750bddf6-1369-48f0-b044-0579426ef1b7" containerName="registry-server" containerID="cri-o://c8041e0c3b2a48cff95deb0666ffd3924bdc15d8803a0cab0481415b1173b517" gracePeriod=2 Sep 30 11:42:31 crc kubenswrapper[4730]: I0930 11:42:31.187432 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-6fvh5" Sep 30 11:42:31 crc kubenswrapper[4730]: I0930 11:42:31.268133 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/750bddf6-1369-48f0-b044-0579426ef1b7-utilities\") pod \"750bddf6-1369-48f0-b044-0579426ef1b7\" (UID: \"750bddf6-1369-48f0-b044-0579426ef1b7\") " Sep 30 11:42:31 crc kubenswrapper[4730]: I0930 11:42:31.268225 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/750bddf6-1369-48f0-b044-0579426ef1b7-catalog-content\") pod \"750bddf6-1369-48f0-b044-0579426ef1b7\" (UID: \"750bddf6-1369-48f0-b044-0579426ef1b7\") " Sep 30 11:42:31 crc kubenswrapper[4730]: I0930 11:42:31.268370 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g2265\" (UniqueName: \"kubernetes.io/projected/750bddf6-1369-48f0-b044-0579426ef1b7-kube-api-access-g2265\") pod \"750bddf6-1369-48f0-b044-0579426ef1b7\" (UID: \"750bddf6-1369-48f0-b044-0579426ef1b7\") " Sep 30 11:42:31 crc kubenswrapper[4730]: I0930 11:42:31.270479 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/750bddf6-1369-48f0-b044-0579426ef1b7-utilities" (OuterVolumeSpecName: "utilities") pod "750bddf6-1369-48f0-b044-0579426ef1b7" (UID: "750bddf6-1369-48f0-b044-0579426ef1b7"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 11:42:31 crc kubenswrapper[4730]: I0930 11:42:31.277447 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/750bddf6-1369-48f0-b044-0579426ef1b7-kube-api-access-g2265" (OuterVolumeSpecName: "kube-api-access-g2265") pod "750bddf6-1369-48f0-b044-0579426ef1b7" (UID: "750bddf6-1369-48f0-b044-0579426ef1b7"). InnerVolumeSpecName "kube-api-access-g2265". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 11:42:31 crc kubenswrapper[4730]: I0930 11:42:31.279375 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g2265\" (UniqueName: \"kubernetes.io/projected/750bddf6-1369-48f0-b044-0579426ef1b7-kube-api-access-g2265\") on node \"crc\" DevicePath \"\"" Sep 30 11:42:31 crc kubenswrapper[4730]: I0930 11:42:31.279409 4730 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/750bddf6-1369-48f0-b044-0579426ef1b7-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 11:42:31 crc kubenswrapper[4730]: I0930 11:42:31.318569 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/750bddf6-1369-48f0-b044-0579426ef1b7-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "750bddf6-1369-48f0-b044-0579426ef1b7" (UID: "750bddf6-1369-48f0-b044-0579426ef1b7"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 11:42:31 crc kubenswrapper[4730]: I0930 11:42:31.381060 4730 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/750bddf6-1369-48f0-b044-0579426ef1b7-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 11:42:31 crc kubenswrapper[4730]: I0930 11:42:31.686099 4730 generic.go:334] "Generic (PLEG): container finished" podID="750bddf6-1369-48f0-b044-0579426ef1b7" containerID="c8041e0c3b2a48cff95deb0666ffd3924bdc15d8803a0cab0481415b1173b517" exitCode=0 Sep 30 11:42:31 crc kubenswrapper[4730]: I0930 11:42:31.686153 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-6fvh5" Sep 30 11:42:31 crc kubenswrapper[4730]: I0930 11:42:31.686175 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6fvh5" event={"ID":"750bddf6-1369-48f0-b044-0579426ef1b7","Type":"ContainerDied","Data":"c8041e0c3b2a48cff95deb0666ffd3924bdc15d8803a0cab0481415b1173b517"} Sep 30 11:42:31 crc kubenswrapper[4730]: I0930 11:42:31.687330 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6fvh5" event={"ID":"750bddf6-1369-48f0-b044-0579426ef1b7","Type":"ContainerDied","Data":"e1070649bce1b1e59bac0b6e189368b4094b5617a79f45ead1e06f70ba8ff0c9"} Sep 30 11:42:31 crc kubenswrapper[4730]: I0930 11:42:31.687386 4730 scope.go:117] "RemoveContainer" containerID="c8041e0c3b2a48cff95deb0666ffd3924bdc15d8803a0cab0481415b1173b517" Sep 30 11:42:31 crc kubenswrapper[4730]: I0930 11:42:31.735455 4730 scope.go:117] "RemoveContainer" containerID="ac42081f8ca4d495435f5f987111a696ac3f3488df36905e324b3e72d1db159e" Sep 30 11:42:31 crc kubenswrapper[4730]: I0930 11:42:31.767708 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-6fvh5"] Sep 30 11:42:31 crc kubenswrapper[4730]: I0930 11:42:31.783074 4730 scope.go:117] "RemoveContainer" containerID="9629fcb2ba28cc511713f0733c3acd740ea497398d350c548ce036bcdf7511b8" Sep 30 11:42:31 crc kubenswrapper[4730]: I0930 11:42:31.790539 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-6fvh5"] Sep 30 11:42:31 crc kubenswrapper[4730]: I0930 11:42:31.817069 4730 scope.go:117] "RemoveContainer" containerID="c8041e0c3b2a48cff95deb0666ffd3924bdc15d8803a0cab0481415b1173b517" Sep 30 11:42:31 crc kubenswrapper[4730]: E0930 11:42:31.817949 4730 log.go:32] 
"ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c8041e0c3b2a48cff95deb0666ffd3924bdc15d8803a0cab0481415b1173b517\": container with ID starting with c8041e0c3b2a48cff95deb0666ffd3924bdc15d8803a0cab0481415b1173b517 not found: ID does not exist" containerID="c8041e0c3b2a48cff95deb0666ffd3924bdc15d8803a0cab0481415b1173b517" Sep 30 11:42:31 crc kubenswrapper[4730]: I0930 11:42:31.817983 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c8041e0c3b2a48cff95deb0666ffd3924bdc15d8803a0cab0481415b1173b517"} err="failed to get container status \"c8041e0c3b2a48cff95deb0666ffd3924bdc15d8803a0cab0481415b1173b517\": rpc error: code = NotFound desc = could not find container \"c8041e0c3b2a48cff95deb0666ffd3924bdc15d8803a0cab0481415b1173b517\": container with ID starting with c8041e0c3b2a48cff95deb0666ffd3924bdc15d8803a0cab0481415b1173b517 not found: ID does not exist" Sep 30 11:42:31 crc kubenswrapper[4730]: I0930 11:42:31.818003 4730 scope.go:117] "RemoveContainer" containerID="ac42081f8ca4d495435f5f987111a696ac3f3488df36905e324b3e72d1db159e" Sep 30 11:42:31 crc kubenswrapper[4730]: E0930 11:42:31.818364 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ac42081f8ca4d495435f5f987111a696ac3f3488df36905e324b3e72d1db159e\": container with ID starting with ac42081f8ca4d495435f5f987111a696ac3f3488df36905e324b3e72d1db159e not found: ID does not exist" containerID="ac42081f8ca4d495435f5f987111a696ac3f3488df36905e324b3e72d1db159e" Sep 30 11:42:31 crc kubenswrapper[4730]: I0930 11:42:31.818407 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ac42081f8ca4d495435f5f987111a696ac3f3488df36905e324b3e72d1db159e"} err="failed to get container status \"ac42081f8ca4d495435f5f987111a696ac3f3488df36905e324b3e72d1db159e\": rpc error: code = NotFound desc = could not find container \"ac42081f8ca4d495435f5f987111a696ac3f3488df36905e324b3e72d1db159e\": container with ID starting with ac42081f8ca4d495435f5f987111a696ac3f3488df36905e324b3e72d1db159e not found: ID does not exist" Sep 30 11:42:31 crc kubenswrapper[4730]: I0930 11:42:31.818435 4730 scope.go:117] "RemoveContainer" containerID="9629fcb2ba28cc511713f0733c3acd740ea497398d350c548ce036bcdf7511b8" Sep 30 11:42:31 crc kubenswrapper[4730]: E0930 11:42:31.818722 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9629fcb2ba28cc511713f0733c3acd740ea497398d350c548ce036bcdf7511b8\": container with ID starting with 9629fcb2ba28cc511713f0733c3acd740ea497398d350c548ce036bcdf7511b8 not found: ID does not exist" containerID="9629fcb2ba28cc511713f0733c3acd740ea497398d350c548ce036bcdf7511b8" Sep 30 11:42:31 crc kubenswrapper[4730]: I0930 11:42:31.818747 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9629fcb2ba28cc511713f0733c3acd740ea497398d350c548ce036bcdf7511b8"} err="failed to get container status \"9629fcb2ba28cc511713f0733c3acd740ea497398d350c548ce036bcdf7511b8\": rpc error: code = NotFound desc = could not find container \"9629fcb2ba28cc511713f0733c3acd740ea497398d350c548ce036bcdf7511b8\": container with ID starting with 9629fcb2ba28cc511713f0733c3acd740ea497398d350c548ce036bcdf7511b8 not found: ID does not exist" Sep 30 11:42:32 crc kubenswrapper[4730]: I0930 11:42:32.337438 4730 patch_prober.go:28] interesting 
pod/machine-config-daemon-d4zf9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 11:42:32 crc kubenswrapper[4730]: I0930 11:42:32.337766 4730 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 11:42:32 crc kubenswrapper[4730]: I0930 11:42:32.393226 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="750bddf6-1369-48f0-b044-0579426ef1b7" path="/var/lib/kubelet/pods/750bddf6-1369-48f0-b044-0579426ef1b7/volumes" Sep 30 11:42:33 crc kubenswrapper[4730]: I0930 11:42:33.137234 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-54cc89d54-v6hb5_857c5dfa-3085-497b-8466-96eefd60c85d/barbican-api/0.log" Sep 30 11:42:33 crc kubenswrapper[4730]: I0930 11:42:33.279895 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-54cc89d54-v6hb5_857c5dfa-3085-497b-8466-96eefd60c85d/barbican-api-log/0.log" Sep 30 11:42:33 crc kubenswrapper[4730]: I0930 11:42:33.665921 4730 scope.go:117] "RemoveContainer" containerID="8b7bbf270c5a949769bb51ae97928c3acadfb2586931215ce11494076af40a3c" Sep 30 11:42:33 crc kubenswrapper[4730]: I0930 11:42:33.719370 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-6cdddb844-vsgsp_1018e1cd-c432-45b3-8267-0f37607cff2f/barbican-keystone-listener/0.log" Sep 30 11:42:33 crc kubenswrapper[4730]: I0930 11:42:33.980914 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-6cdddb844-vsgsp_1018e1cd-c432-45b3-8267-0f37607cff2f/barbican-keystone-listener-log/0.log" Sep 30 11:42:34 crc kubenswrapper[4730]: I0930 11:42:34.330263 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-665f7f89df-b6qmw_87696e9f-ed08-459f-80b7-c4c5499e4157/barbican-worker/0.log" Sep 30 11:42:34 crc kubenswrapper[4730]: I0930 11:42:34.382351 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-665f7f89df-b6qmw_87696e9f-ed08-459f-80b7-c4c5499e4157/barbican-worker-log/0.log" Sep 30 11:42:34 crc kubenswrapper[4730]: I0930 11:42:34.653354 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-edpm-deployment-openstack-edpm-ipam-2qpzs_96923f77-1ffc-4d73-adf3-33f66499e0f9/bootstrap-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 11:42:34 crc kubenswrapper[4730]: I0930 11:42:34.899137 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-h2qp6"] Sep 30 11:42:34 crc kubenswrapper[4730]: E0930 11:42:34.899665 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="750bddf6-1369-48f0-b044-0579426ef1b7" containerName="extract-content" Sep 30 11:42:34 crc kubenswrapper[4730]: I0930 11:42:34.899715 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="750bddf6-1369-48f0-b044-0579426ef1b7" containerName="extract-content" Sep 30 11:42:34 crc kubenswrapper[4730]: E0930 11:42:34.899738 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="750bddf6-1369-48f0-b044-0579426ef1b7" containerName="registry-server" Sep 30 
11:42:34 crc kubenswrapper[4730]: I0930 11:42:34.899744 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="750bddf6-1369-48f0-b044-0579426ef1b7" containerName="registry-server" Sep 30 11:42:34 crc kubenswrapper[4730]: E0930 11:42:34.899757 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="750bddf6-1369-48f0-b044-0579426ef1b7" containerName="extract-utilities" Sep 30 11:42:34 crc kubenswrapper[4730]: I0930 11:42:34.899763 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="750bddf6-1369-48f0-b044-0579426ef1b7" containerName="extract-utilities" Sep 30 11:42:34 crc kubenswrapper[4730]: I0930 11:42:34.900020 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="750bddf6-1369-48f0-b044-0579426ef1b7" containerName="registry-server" Sep 30 11:42:34 crc kubenswrapper[4730]: I0930 11:42:34.901567 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-h2qp6" Sep 30 11:42:34 crc kubenswrapper[4730]: I0930 11:42:34.915908 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-h2qp6"] Sep 30 11:42:34 crc kubenswrapper[4730]: I0930 11:42:34.929404 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_4eb0bfb8-0c6a-4d5f-9b85-e06888511203/ceilometer-central-agent/0.log" Sep 30 11:42:34 crc kubenswrapper[4730]: I0930 11:42:34.957625 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/481fb01d-12be-4f70-8f95-bd250c953c74-utilities\") pod \"redhat-operators-h2qp6\" (UID: \"481fb01d-12be-4f70-8f95-bd250c953c74\") " pod="openshift-marketplace/redhat-operators-h2qp6" Sep 30 11:42:34 crc kubenswrapper[4730]: I0930 11:42:34.957695 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/481fb01d-12be-4f70-8f95-bd250c953c74-catalog-content\") pod \"redhat-operators-h2qp6\" (UID: \"481fb01d-12be-4f70-8f95-bd250c953c74\") " pod="openshift-marketplace/redhat-operators-h2qp6" Sep 30 11:42:34 crc kubenswrapper[4730]: I0930 11:42:34.957753 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t64m2\" (UniqueName: \"kubernetes.io/projected/481fb01d-12be-4f70-8f95-bd250c953c74-kube-api-access-t64m2\") pod \"redhat-operators-h2qp6\" (UID: \"481fb01d-12be-4f70-8f95-bd250c953c74\") " pod="openshift-marketplace/redhat-operators-h2qp6" Sep 30 11:42:35 crc kubenswrapper[4730]: I0930 11:42:35.050229 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_4eb0bfb8-0c6a-4d5f-9b85-e06888511203/ceilometer-notification-agent/0.log" Sep 30 11:42:35 crc kubenswrapper[4730]: I0930 11:42:35.059795 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/481fb01d-12be-4f70-8f95-bd250c953c74-utilities\") pod \"redhat-operators-h2qp6\" (UID: \"481fb01d-12be-4f70-8f95-bd250c953c74\") " pod="openshift-marketplace/redhat-operators-h2qp6" Sep 30 11:42:35 crc kubenswrapper[4730]: I0930 11:42:35.059881 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/481fb01d-12be-4f70-8f95-bd250c953c74-catalog-content\") pod \"redhat-operators-h2qp6\" (UID: \"481fb01d-12be-4f70-8f95-bd250c953c74\") " 
pod="openshift-marketplace/redhat-operators-h2qp6" Sep 30 11:42:35 crc kubenswrapper[4730]: I0930 11:42:35.059927 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t64m2\" (UniqueName: \"kubernetes.io/projected/481fb01d-12be-4f70-8f95-bd250c953c74-kube-api-access-t64m2\") pod \"redhat-operators-h2qp6\" (UID: \"481fb01d-12be-4f70-8f95-bd250c953c74\") " pod="openshift-marketplace/redhat-operators-h2qp6" Sep 30 11:42:35 crc kubenswrapper[4730]: I0930 11:42:35.060359 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/481fb01d-12be-4f70-8f95-bd250c953c74-utilities\") pod \"redhat-operators-h2qp6\" (UID: \"481fb01d-12be-4f70-8f95-bd250c953c74\") " pod="openshift-marketplace/redhat-operators-h2qp6" Sep 30 11:42:35 crc kubenswrapper[4730]: I0930 11:42:35.060680 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/481fb01d-12be-4f70-8f95-bd250c953c74-catalog-content\") pod \"redhat-operators-h2qp6\" (UID: \"481fb01d-12be-4f70-8f95-bd250c953c74\") " pod="openshift-marketplace/redhat-operators-h2qp6" Sep 30 11:42:35 crc kubenswrapper[4730]: I0930 11:42:35.091319 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t64m2\" (UniqueName: \"kubernetes.io/projected/481fb01d-12be-4f70-8f95-bd250c953c74-kube-api-access-t64m2\") pod \"redhat-operators-h2qp6\" (UID: \"481fb01d-12be-4f70-8f95-bd250c953c74\") " pod="openshift-marketplace/redhat-operators-h2qp6" Sep 30 11:42:35 crc kubenswrapper[4730]: I0930 11:42:35.174487 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_4eb0bfb8-0c6a-4d5f-9b85-e06888511203/proxy-httpd/0.log" Sep 30 11:42:35 crc kubenswrapper[4730]: I0930 11:42:35.239133 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-h2qp6" Sep 30 11:42:35 crc kubenswrapper[4730]: I0930 11:42:35.254224 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_4eb0bfb8-0c6a-4d5f-9b85-e06888511203/sg-core/0.log" Sep 30 11:42:35 crc kubenswrapper[4730]: I0930 11:42:35.684936 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceph-client-edpm-deployment-openstack-edpm-ipam-dlnrw_d0e46e4d-3e83-4381-b519-f840ffb1911b/ceph-client-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 11:42:35 crc kubenswrapper[4730]: I0930 11:42:35.724865 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-h2qp6"] Sep 30 11:42:35 crc kubenswrapper[4730]: W0930 11:42:35.728826 4730 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod481fb01d_12be_4f70_8f95_bd250c953c74.slice/crio-1ec685a4f94689c04ea98ec735029c450a3a05895e0fffc292f409e8957930b6 WatchSource:0}: Error finding container 1ec685a4f94689c04ea98ec735029c450a3a05895e0fffc292f409e8957930b6: Status 404 returned error can't find the container with id 1ec685a4f94689c04ea98ec735029c450a3a05895e0fffc292f409e8957930b6 Sep 30 11:42:35 crc kubenswrapper[4730]: I0930 11:42:35.981636 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-cwj6s_15b0b939-744d-4ea8-8ff3-942843d32348/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 11:42:36 crc kubenswrapper[4730]: I0930 11:42:36.528514 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_063272c9-558f-47d6-84c0-aa0de64bb715/cinder-api-log/0.log" Sep 30 11:42:36 crc kubenswrapper[4730]: I0930 11:42:36.730987 4730 generic.go:334] "Generic (PLEG): container finished" podID="481fb01d-12be-4f70-8f95-bd250c953c74" containerID="dcfcc7e9ab840044e61cd0d7fc8eb76af537a66ed582eb8ab15294c6b331a222" exitCode=0 Sep 30 11:42:36 crc kubenswrapper[4730]: I0930 11:42:36.731058 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-h2qp6" event={"ID":"481fb01d-12be-4f70-8f95-bd250c953c74","Type":"ContainerDied","Data":"dcfcc7e9ab840044e61cd0d7fc8eb76af537a66ed582eb8ab15294c6b331a222"} Sep 30 11:42:36 crc kubenswrapper[4730]: I0930 11:42:36.732020 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-h2qp6" event={"ID":"481fb01d-12be-4f70-8f95-bd250c953c74","Type":"ContainerStarted","Data":"1ec685a4f94689c04ea98ec735029c450a3a05895e0fffc292f409e8957930b6"} Sep 30 11:42:37 crc kubenswrapper[4730]: I0930 11:42:37.692882 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_063272c9-558f-47d6-84c0-aa0de64bb715/cinder-api/0.log" Sep 30 11:42:37 crc kubenswrapper[4730]: I0930 11:42:37.761600 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-h2qp6" event={"ID":"481fb01d-12be-4f70-8f95-bd250c953c74","Type":"ContainerStarted","Data":"75609c057edc9fb6694dec1ce5a79d38bdd9e8c2af3bd09f0c71e97ada058df7"} Sep 30 11:42:37 crc kubenswrapper[4730]: I0930 11:42:37.822254 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-backup-0_0983f2ba-1e23-492a-abb5-5cc4f4199925/cinder-backup/0.log" Sep 30 11:42:37 crc kubenswrapper[4730]: I0930 11:42:37.912858 4730 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_cinder-backup-0_0983f2ba-1e23-492a-abb5-5cc4f4199925/probe/0.log" Sep 30 11:42:38 crc kubenswrapper[4730]: I0930 11:42:38.152824 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_a7f343db-0bd4-4e21-ae2d-0b61a4aa0e09/cinder-scheduler/0.log" Sep 30 11:42:38 crc kubenswrapper[4730]: I0930 11:42:38.254698 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_a7f343db-0bd4-4e21-ae2d-0b61a4aa0e09/probe/0.log" Sep 30 11:42:38 crc kubenswrapper[4730]: I0930 11:42:38.654137 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-volume-volume1-0_ad078102-347b-4f85-8fa5-f83cbf35c06a/probe/0.log" Sep 30 11:42:38 crc kubenswrapper[4730]: I0930 11:42:38.702652 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-volume-volume1-0_ad078102-347b-4f85-8fa5-f83cbf35c06a/cinder-volume/0.log" Sep 30 11:42:39 crc kubenswrapper[4730]: I0930 11:42:39.086115 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-volume-volume2-0_12c02e5f-fb4a-46e7-8772-07bbe148bdcd/probe/0.log" Sep 30 11:42:39 crc kubenswrapper[4730]: I0930 11:42:39.170791 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-volume-volume2-0_12c02e5f-fb4a-46e7-8772-07bbe148bdcd/cinder-volume/0.log" Sep 30 11:42:39 crc kubenswrapper[4730]: I0930 11:42:39.353979 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-edpm-deployment-openstack-edpm-ipam-jrhtl_076a53c1-4f43-4c11-b67a-163d1fe06287/configure-network-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 11:42:39 crc kubenswrapper[4730]: I0930 11:42:39.569130 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-mtwx8_64074625-ee39-4163-afbf-bc8e220b63e7/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 11:42:39 crc kubenswrapper[4730]: I0930 11:42:39.742914 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-5c4f8cc8c-r8gbc_7562489e-f18e-470f-a208-8479d49513f9/init/0.log" Sep 30 11:42:40 crc kubenswrapper[4730]: I0930 11:42:40.001526 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-5c4f8cc8c-r8gbc_7562489e-f18e-470f-a208-8479d49513f9/init/0.log" Sep 30 11:42:40 crc kubenswrapper[4730]: I0930 11:42:40.668037 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-5c4f8cc8c-r8gbc_7562489e-f18e-470f-a208-8479d49513f9/dnsmasq-dns/0.log" Sep 30 11:42:40 crc kubenswrapper[4730]: I0930 11:42:40.815990 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_e5bde710-0351-45d0-acb8-990719f9ba34/glance-httpd/0.log" Sep 30 11:42:40 crc kubenswrapper[4730]: I0930 11:42:40.952277 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_e5bde710-0351-45d0-acb8-990719f9ba34/glance-log/0.log" Sep 30 11:42:41 crc kubenswrapper[4730]: I0930 11:42:41.078244 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_8532ec07-5ee5-40c9-82f5-df62806d03f5/glance-httpd/0.log" Sep 30 11:42:41 crc kubenswrapper[4730]: I0930 11:42:41.162973 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_8532ec07-5ee5-40c9-82f5-df62806d03f5/glance-log/0.log" Sep 30 11:42:41 crc kubenswrapper[4730]: I0930 
11:42:41.358213 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-6b9f68988b-b4q58_73859337-4ff6-4ada-bc9b-a29b6b1fc478/horizon/0.log" Sep 30 11:42:41 crc kubenswrapper[4730]: I0930 11:42:41.555598 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-edpm-deployment-openstack-edpm-ipam-47s8l_c191d318-9d8c-4aac-bbc9-371553bb29bf/install-certs-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 11:42:41 crc kubenswrapper[4730]: I0930 11:42:41.807821 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-edpm-deployment-openstack-edpm-ipam-xhgzq_9e488e17-15bf-414e-b0cb-e5b3dbf22769/install-os-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 11:42:41 crc kubenswrapper[4730]: I0930 11:42:41.813832 4730 generic.go:334] "Generic (PLEG): container finished" podID="481fb01d-12be-4f70-8f95-bd250c953c74" containerID="75609c057edc9fb6694dec1ce5a79d38bdd9e8c2af3bd09f0c71e97ada058df7" exitCode=0 Sep 30 11:42:41 crc kubenswrapper[4730]: I0930 11:42:41.813879 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-h2qp6" event={"ID":"481fb01d-12be-4f70-8f95-bd250c953c74","Type":"ContainerDied","Data":"75609c057edc9fb6694dec1ce5a79d38bdd9e8c2af3bd09f0c71e97ada058df7"} Sep 30 11:42:41 crc kubenswrapper[4730]: I0930 11:42:41.876201 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-6b9f68988b-b4q58_73859337-4ff6-4ada-bc9b-a29b6b1fc478/horizon-log/0.log" Sep 30 11:42:42 crc kubenswrapper[4730]: I0930 11:42:42.184415 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29320501-wlbqz_8a0c7d59-5820-461e-a4ee-7ff69b8feadd/keystone-cron/0.log" Sep 30 11:42:42 crc kubenswrapper[4730]: I0930 11:42:42.478184 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_5c0ac696-252a-4b32-8086-a6d3a02e945f/kube-state-metrics/0.log" Sep 30 11:42:42 crc kubenswrapper[4730]: I0930 11:42:42.585506 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-7dfc9d946-psk9c_fdc671fd-529e-484c-9924-355c64d393ff/keystone-api/0.log" Sep 30 11:42:42 crc kubenswrapper[4730]: I0930 11:42:42.693236 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-edpm-deployment-openstack-edpm-ipam-qcc5h_114c62cf-b040-491e-90fa-794b4cc29361/libvirt-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 11:42:42 crc kubenswrapper[4730]: I0930 11:42:42.825541 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-h2qp6" event={"ID":"481fb01d-12be-4f70-8f95-bd250c953c74","Type":"ContainerStarted","Data":"b98ba7f0120bc5a51365d0b0345cb9869f871d9de78ce1784cd1a394c4b81f05"} Sep 30 11:42:42 crc kubenswrapper[4730]: I0930 11:42:42.851906 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-h2qp6" podStartSLOduration=3.326136113 podStartE2EDuration="8.851884743s" podCreationTimestamp="2025-09-30 11:42:34 +0000 UTC" firstStartedPulling="2025-09-30 11:42:36.732974358 +0000 UTC m=+6801.066234351" lastFinishedPulling="2025-09-30 11:42:42.258722978 +0000 UTC m=+6806.591982981" observedRunningTime="2025-09-30 11:42:42.847965409 +0000 UTC m=+6807.181225402" watchObservedRunningTime="2025-09-30 11:42:42.851884743 +0000 UTC m=+6807.185144736" Sep 30 11:42:43 crc kubenswrapper[4730]: I0930 11:42:43.474760 4730 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_neutron-9cf79fcd5-4nrfn_2cb5a17e-0f48-4341-9a0e-9c84e63fed3b/neutron-api/0.log" Sep 30 11:42:43 crc kubenswrapper[4730]: I0930 11:42:43.506640 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-9cf79fcd5-4nrfn_2cb5a17e-0f48-4341-9a0e-9c84e63fed3b/neutron-httpd/0.log" Sep 30 11:42:43 crc kubenswrapper[4730]: I0930 11:42:43.747397 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-edpm-deployment-openstack-edpm-ipam-xtwz8_25b53534-c380-44dd-aa82-22606c2a5d22/neutron-metadata-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 11:42:45 crc kubenswrapper[4730]: I0930 11:42:45.239297 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-h2qp6" Sep 30 11:42:45 crc kubenswrapper[4730]: I0930 11:42:45.239668 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-h2qp6" Sep 30 11:42:46 crc kubenswrapper[4730]: I0930 11:42:46.059103 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_0332adf1-8148-4923-9273-1ef8869dfad1/nova-api-log/0.log" Sep 30 11:42:46 crc kubenswrapper[4730]: I0930 11:42:46.299006 4730 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-h2qp6" podUID="481fb01d-12be-4f70-8f95-bd250c953c74" containerName="registry-server" probeResult="failure" output=< Sep 30 11:42:46 crc kubenswrapper[4730]: timeout: failed to connect service ":50051" within 1s Sep 30 11:42:46 crc kubenswrapper[4730]: > Sep 30 11:42:46 crc kubenswrapper[4730]: I0930 11:42:46.320943 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_0332adf1-8148-4923-9273-1ef8869dfad1/nova-api-api/0.log" Sep 30 11:42:46 crc kubenswrapper[4730]: I0930 11:42:46.588755 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_4b30e43c-c603-4ad2-a95b-8a8a1ac5a46a/nova-cell0-conductor-conductor/0.log" Sep 30 11:42:46 crc kubenswrapper[4730]: I0930 11:42:46.744716 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_4521e8b6-3634-4d50-9050-0fccded8d973/nova-cell1-conductor-conductor/0.log" Sep 30 11:42:47 crc kubenswrapper[4730]: I0930 11:42:47.141067 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_ed5ae6a3-8d5a-4fd9-8de9-380e7a54ce4c/nova-cell1-novncproxy-novncproxy/0.log" Sep 30 11:42:47 crc kubenswrapper[4730]: I0930 11:42:47.175676 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-mkqvk_80c10bdf-95bb-4372-ba25-b7bd5f563225/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 11:42:47 crc kubenswrapper[4730]: I0930 11:42:47.456577 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_a3c8895e-0bd8-4e06-a121-3afe3bcdf54f/nova-metadata-log/0.log" Sep 30 11:42:47 crc kubenswrapper[4730]: I0930 11:42:47.963178 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_c1e0179d-6dc3-4dec-8ff6-48c794add5a3/mysql-bootstrap/0.log" Sep 30 11:42:48 crc kubenswrapper[4730]: I0930 11:42:48.151600 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_c1e0179d-6dc3-4dec-8ff6-48c794add5a3/mysql-bootstrap/0.log" Sep 30 11:42:48 crc kubenswrapper[4730]: I0930 11:42:48.208655 4730 
log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_fca33d2f-4ffc-40bf-a02b-f5f757ca2d1f/nova-scheduler-scheduler/0.log" Sep 30 11:42:48 crc kubenswrapper[4730]: I0930 11:42:48.391088 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_c1e0179d-6dc3-4dec-8ff6-48c794add5a3/galera/0.log" Sep 30 11:42:48 crc kubenswrapper[4730]: I0930 11:42:48.645720 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_28cd8cfe-8aa0-4ba3-bbfc-a8475c534fa1/mysql-bootstrap/0.log" Sep 30 11:42:48 crc kubenswrapper[4730]: I0930 11:42:48.884358 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_28cd8cfe-8aa0-4ba3-bbfc-a8475c534fa1/mysql-bootstrap/0.log" Sep 30 11:42:48 crc kubenswrapper[4730]: I0930 11:42:48.893198 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_28cd8cfe-8aa0-4ba3-bbfc-a8475c534fa1/galera/0.log" Sep 30 11:42:49 crc kubenswrapper[4730]: I0930 11:42:49.064204 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_7177e538-cc55-44d5-9274-67a54b79f589/openstackclient/0.log" Sep 30 11:42:49 crc kubenswrapper[4730]: I0930 11:42:49.334579 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-4tlvb_d247bfe5-48d7-49be-9cd4-2d3368015e3a/ovn-controller/0.log" Sep 30 11:42:49 crc kubenswrapper[4730]: I0930 11:42:49.516163 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-zh8k5_f609f208-0734-4364-b05e-0364bded655e/openstack-network-exporter/0.log" Sep 30 11:42:49 crc kubenswrapper[4730]: I0930 11:42:49.776801 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-wb9fw_43c558ac-76c0-4c01-a265-41320a386add/ovsdb-server-init/0.log" Sep 30 11:42:49 crc kubenswrapper[4730]: I0930 11:42:49.952371 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-wb9fw_43c558ac-76c0-4c01-a265-41320a386add/ovsdb-server-init/0.log" Sep 30 11:42:50 crc kubenswrapper[4730]: I0930 11:42:50.115420 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-wb9fw_43c558ac-76c0-4c01-a265-41320a386add/ovsdb-server/0.log" Sep 30 11:42:50 crc kubenswrapper[4730]: I0930 11:42:50.378124 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-wb9fw_43c558ac-76c0-4c01-a265-41320a386add/ovs-vswitchd/0.log" Sep 30 11:42:50 crc kubenswrapper[4730]: I0930 11:42:50.535406 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-br6q6_ae1fcc30-65e0-4f9d-9ffa-bd87c1effd02/ovn-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 11:42:50 crc kubenswrapper[4730]: I0930 11:42:50.685362 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_a3c8895e-0bd8-4e06-a121-3afe3bcdf54f/nova-metadata-metadata/0.log" Sep 30 11:42:50 crc kubenswrapper[4730]: I0930 11:42:50.800014 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_841c63b2-fda6-4269-8ba8-8567555326b4/openstack-network-exporter/0.log" Sep 30 11:42:50 crc kubenswrapper[4730]: I0930 11:42:50.848663 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_841c63b2-fda6-4269-8ba8-8567555326b4/ovn-northd/0.log" Sep 30 11:42:50 crc kubenswrapper[4730]: I0930 11:42:50.989455 4730 log.go:25] 
"Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_d84e120e-743b-4797-aa0a-e231ecfa59ab/openstack-network-exporter/0.log" Sep 30 11:42:51 crc kubenswrapper[4730]: I0930 11:42:51.041773 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_d84e120e-743b-4797-aa0a-e231ecfa59ab/ovsdbserver-nb/0.log" Sep 30 11:42:51 crc kubenswrapper[4730]: I0930 11:42:51.158734 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_5e6063f5-43cd-45ec-9ac3-4de0fd55cb15/openstack-network-exporter/0.log" Sep 30 11:42:51 crc kubenswrapper[4730]: I0930 11:42:51.257384 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_5e6063f5-43cd-45ec-9ac3-4de0fd55cb15/ovsdbserver-sb/0.log" Sep 30 11:42:51 crc kubenswrapper[4730]: I0930 11:42:51.729294 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-7d48d7c7fd-7l8hx_57cdca52-f1f9-48c7-8fb6-9144a033c957/placement-api/0.log" Sep 30 11:42:51 crc kubenswrapper[4730]: I0930 11:42:51.820451 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-7d48d7c7fd-7l8hx_57cdca52-f1f9-48c7-8fb6-9144a033c957/placement-log/0.log" Sep 30 11:42:51 crc kubenswrapper[4730]: I0930 11:42:51.912243 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_aae3dec7-c6e3-4bd3-ad48-96d4d959d228/init-config-reloader/0.log" Sep 30 11:42:52 crc kubenswrapper[4730]: I0930 11:42:52.117848 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_aae3dec7-c6e3-4bd3-ad48-96d4d959d228/init-config-reloader/0.log" Sep 30 11:42:52 crc kubenswrapper[4730]: I0930 11:42:52.132131 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_aae3dec7-c6e3-4bd3-ad48-96d4d959d228/prometheus/0.log" Sep 30 11:42:52 crc kubenswrapper[4730]: I0930 11:42:52.231727 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_aae3dec7-c6e3-4bd3-ad48-96d4d959d228/config-reloader/0.log" Sep 30 11:42:52 crc kubenswrapper[4730]: I0930 11:42:52.380594 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_aae3dec7-c6e3-4bd3-ad48-96d4d959d228/thanos-sidecar/0.log" Sep 30 11:42:52 crc kubenswrapper[4730]: I0930 11:42:52.452087 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_a3b79a67-6ca7-44cd-8108-9afb64437809/setup-container/0.log" Sep 30 11:42:52 crc kubenswrapper[4730]: I0930 11:42:52.707424 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_a3b79a67-6ca7-44cd-8108-9afb64437809/setup-container/0.log" Sep 30 11:42:52 crc kubenswrapper[4730]: I0930 11:42:52.718729 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_a3b79a67-6ca7-44cd-8108-9afb64437809/rabbitmq/0.log" Sep 30 11:42:52 crc kubenswrapper[4730]: I0930 11:42:52.907156 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-notifications-server-0_7ba6b518-edfa-4d19-b096-03d7d96c51a3/setup-container/0.log" Sep 30 11:42:53 crc kubenswrapper[4730]: I0930 11:42:53.076415 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-notifications-server-0_7ba6b518-edfa-4d19-b096-03d7d96c51a3/setup-container/0.log" Sep 30 11:42:53 crc kubenswrapper[4730]: I0930 11:42:53.162272 4730 
log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-notifications-server-0_7ba6b518-edfa-4d19-b096-03d7d96c51a3/rabbitmq/0.log" Sep 30 11:42:53 crc kubenswrapper[4730]: I0930 11:42:53.288174 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_a1a78aec-c35b-41c6-a1e0-43fba77e84fd/setup-container/0.log" Sep 30 11:42:53 crc kubenswrapper[4730]: I0930 11:42:53.429449 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_a1a78aec-c35b-41c6-a1e0-43fba77e84fd/setup-container/0.log" Sep 30 11:42:53 crc kubenswrapper[4730]: I0930 11:42:53.527731 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_a1a78aec-c35b-41c6-a1e0-43fba77e84fd/rabbitmq/0.log" Sep 30 11:42:53 crc kubenswrapper[4730]: I0930 11:42:53.648706 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-edpm-deployment-openstack-edpm-ipam-ztvhs_b5e99fb9-fd07-410f-8c9b-bde6849b5655/reboot-os-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 11:42:53 crc kubenswrapper[4730]: I0930 11:42:53.701905 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-wqj6f_9c876aea-c4ac-4055-953d-9bedb3615be5/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 11:42:53 crc kubenswrapper[4730]: I0930 11:42:53.902424 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-edpm-deployment-openstack-edpm-ipam-9xt9m_cda7e260-a520-4ac4-a1f4-b8e7684d2742/run-os-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 11:42:54 crc kubenswrapper[4730]: I0930 11:42:54.111991 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-edpm-deployment-2p45l_9a25d6ab-b556-4812-b876-92f7574e6da9/ssh-known-hosts-edpm-deployment/0.log" Sep 30 11:42:54 crc kubenswrapper[4730]: I0930 11:42:54.374389 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_telemetry-edpm-deployment-openstack-edpm-ipam-plcgw_9a9e7f46-a278-48e1-9171-826bbba2fe2b/telemetry-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 11:42:54 crc kubenswrapper[4730]: I0930 11:42:54.432392 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tempest-tests-tempest_5d6d300c-5857-4de1-8317-cded656bc61e/tempest-tests-tempest-tests-runner/0.log" Sep 30 11:42:54 crc kubenswrapper[4730]: I0930 11:42:54.548399 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_test-operator-logs-pod-tempest-tempest-tests-tempest_c10a38bd-1150-4f8a-b74d-5f6c7498387b/test-operator-logs-container/0.log" Sep 30 11:42:54 crc kubenswrapper[4730]: I0930 11:42:54.772764 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-edpm-deployment-openstack-edpm-ipam-jsrw2_60081e7d-07fd-48ac-a4ae-46f05ab4d935/validate-network-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 11:42:55 crc kubenswrapper[4730]: I0930 11:42:55.308365 4730 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-h2qp6" Sep 30 11:42:55 crc kubenswrapper[4730]: I0930 11:42:55.359850 4730 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-h2qp6" Sep 30 11:42:55 crc kubenswrapper[4730]: I0930 11:42:55.548347 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-h2qp6"] Sep 30 11:42:56 crc kubenswrapper[4730]: I0930 
11:42:56.062513 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_watcher-applier-0_db8ecf95-5b86-4775-85ee-a3da046e9dba/watcher-applier/0.log" Sep 30 11:42:56 crc kubenswrapper[4730]: I0930 11:42:56.275974 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_watcher-api-0_4caba7a4-0751-4410-88f1-084d5289d1c6/watcher-api-log/0.log" Sep 30 11:42:56 crc kubenswrapper[4730]: I0930 11:42:56.584851 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_watcher-decision-engine-0_a4f9bd21-5f86-4443-87be-eadb5d1c77f9/watcher-decision-engine/2.log" Sep 30 11:42:56 crc kubenswrapper[4730]: I0930 11:42:56.960833 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-h2qp6" podUID="481fb01d-12be-4f70-8f95-bd250c953c74" containerName="registry-server" containerID="cri-o://b98ba7f0120bc5a51365d0b0345cb9869f871d9de78ce1784cd1a394c4b81f05" gracePeriod=2 Sep 30 11:42:57 crc kubenswrapper[4730]: I0930 11:42:57.463770 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-h2qp6" Sep 30 11:42:57 crc kubenswrapper[4730]: I0930 11:42:57.534179 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/481fb01d-12be-4f70-8f95-bd250c953c74-catalog-content\") pod \"481fb01d-12be-4f70-8f95-bd250c953c74\" (UID: \"481fb01d-12be-4f70-8f95-bd250c953c74\") " Sep 30 11:42:57 crc kubenswrapper[4730]: I0930 11:42:57.534362 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t64m2\" (UniqueName: \"kubernetes.io/projected/481fb01d-12be-4f70-8f95-bd250c953c74-kube-api-access-t64m2\") pod \"481fb01d-12be-4f70-8f95-bd250c953c74\" (UID: \"481fb01d-12be-4f70-8f95-bd250c953c74\") " Sep 30 11:42:57 crc kubenswrapper[4730]: I0930 11:42:57.534576 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/481fb01d-12be-4f70-8f95-bd250c953c74-utilities\") pod \"481fb01d-12be-4f70-8f95-bd250c953c74\" (UID: \"481fb01d-12be-4f70-8f95-bd250c953c74\") " Sep 30 11:42:57 crc kubenswrapper[4730]: I0930 11:42:57.536714 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/481fb01d-12be-4f70-8f95-bd250c953c74-utilities" (OuterVolumeSpecName: "utilities") pod "481fb01d-12be-4f70-8f95-bd250c953c74" (UID: "481fb01d-12be-4f70-8f95-bd250c953c74"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 11:42:57 crc kubenswrapper[4730]: I0930 11:42:57.541670 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/481fb01d-12be-4f70-8f95-bd250c953c74-kube-api-access-t64m2" (OuterVolumeSpecName: "kube-api-access-t64m2") pod "481fb01d-12be-4f70-8f95-bd250c953c74" (UID: "481fb01d-12be-4f70-8f95-bd250c953c74"). InnerVolumeSpecName "kube-api-access-t64m2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 11:42:57 crc kubenswrapper[4730]: I0930 11:42:57.636702 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t64m2\" (UniqueName: \"kubernetes.io/projected/481fb01d-12be-4f70-8f95-bd250c953c74-kube-api-access-t64m2\") on node \"crc\" DevicePath \"\"" Sep 30 11:42:57 crc kubenswrapper[4730]: I0930 11:42:57.636741 4730 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/481fb01d-12be-4f70-8f95-bd250c953c74-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 11:42:57 crc kubenswrapper[4730]: I0930 11:42:57.663339 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/481fb01d-12be-4f70-8f95-bd250c953c74-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "481fb01d-12be-4f70-8f95-bd250c953c74" (UID: "481fb01d-12be-4f70-8f95-bd250c953c74"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 11:42:57 crc kubenswrapper[4730]: I0930 11:42:57.738371 4730 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/481fb01d-12be-4f70-8f95-bd250c953c74-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 11:42:57 crc kubenswrapper[4730]: I0930 11:42:57.970767 4730 generic.go:334] "Generic (PLEG): container finished" podID="481fb01d-12be-4f70-8f95-bd250c953c74" containerID="b98ba7f0120bc5a51365d0b0345cb9869f871d9de78ce1784cd1a394c4b81f05" exitCode=0 Sep 30 11:42:57 crc kubenswrapper[4730]: I0930 11:42:57.970816 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-h2qp6" event={"ID":"481fb01d-12be-4f70-8f95-bd250c953c74","Type":"ContainerDied","Data":"b98ba7f0120bc5a51365d0b0345cb9869f871d9de78ce1784cd1a394c4b81f05"} Sep 30 11:42:57 crc kubenswrapper[4730]: I0930 11:42:57.970846 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-h2qp6" event={"ID":"481fb01d-12be-4f70-8f95-bd250c953c74","Type":"ContainerDied","Data":"1ec685a4f94689c04ea98ec735029c450a3a05895e0fffc292f409e8957930b6"} Sep 30 11:42:57 crc kubenswrapper[4730]: I0930 11:42:57.970865 4730 scope.go:117] "RemoveContainer" containerID="b98ba7f0120bc5a51365d0b0345cb9869f871d9de78ce1784cd1a394c4b81f05" Sep 30 11:42:57 crc kubenswrapper[4730]: I0930 11:42:57.970920 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-h2qp6" Sep 30 11:42:57 crc kubenswrapper[4730]: I0930 11:42:57.993517 4730 scope.go:117] "RemoveContainer" containerID="75609c057edc9fb6694dec1ce5a79d38bdd9e8c2af3bd09f0c71e97ada058df7" Sep 30 11:42:58 crc kubenswrapper[4730]: I0930 11:42:58.008734 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-h2qp6"] Sep 30 11:42:58 crc kubenswrapper[4730]: I0930 11:42:58.018835 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-h2qp6"] Sep 30 11:42:58 crc kubenswrapper[4730]: I0930 11:42:58.027638 4730 scope.go:117] "RemoveContainer" containerID="dcfcc7e9ab840044e61cd0d7fc8eb76af537a66ed582eb8ab15294c6b331a222" Sep 30 11:42:58 crc kubenswrapper[4730]: I0930 11:42:58.064856 4730 scope.go:117] "RemoveContainer" containerID="b98ba7f0120bc5a51365d0b0345cb9869f871d9de78ce1784cd1a394c4b81f05" Sep 30 11:42:58 crc kubenswrapper[4730]: E0930 11:42:58.065330 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b98ba7f0120bc5a51365d0b0345cb9869f871d9de78ce1784cd1a394c4b81f05\": container with ID starting with b98ba7f0120bc5a51365d0b0345cb9869f871d9de78ce1784cd1a394c4b81f05 not found: ID does not exist" containerID="b98ba7f0120bc5a51365d0b0345cb9869f871d9de78ce1784cd1a394c4b81f05" Sep 30 11:42:58 crc kubenswrapper[4730]: I0930 11:42:58.065371 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b98ba7f0120bc5a51365d0b0345cb9869f871d9de78ce1784cd1a394c4b81f05"} err="failed to get container status \"b98ba7f0120bc5a51365d0b0345cb9869f871d9de78ce1784cd1a394c4b81f05\": rpc error: code = NotFound desc = could not find container \"b98ba7f0120bc5a51365d0b0345cb9869f871d9de78ce1784cd1a394c4b81f05\": container with ID starting with b98ba7f0120bc5a51365d0b0345cb9869f871d9de78ce1784cd1a394c4b81f05 not found: ID does not exist" Sep 30 11:42:58 crc kubenswrapper[4730]: I0930 11:42:58.065404 4730 scope.go:117] "RemoveContainer" containerID="75609c057edc9fb6694dec1ce5a79d38bdd9e8c2af3bd09f0c71e97ada058df7" Sep 30 11:42:58 crc kubenswrapper[4730]: E0930 11:42:58.065783 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"75609c057edc9fb6694dec1ce5a79d38bdd9e8c2af3bd09f0c71e97ada058df7\": container with ID starting with 75609c057edc9fb6694dec1ce5a79d38bdd9e8c2af3bd09f0c71e97ada058df7 not found: ID does not exist" containerID="75609c057edc9fb6694dec1ce5a79d38bdd9e8c2af3bd09f0c71e97ada058df7" Sep 30 11:42:58 crc kubenswrapper[4730]: I0930 11:42:58.065820 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"75609c057edc9fb6694dec1ce5a79d38bdd9e8c2af3bd09f0c71e97ada058df7"} err="failed to get container status \"75609c057edc9fb6694dec1ce5a79d38bdd9e8c2af3bd09f0c71e97ada058df7\": rpc error: code = NotFound desc = could not find container \"75609c057edc9fb6694dec1ce5a79d38bdd9e8c2af3bd09f0c71e97ada058df7\": container with ID starting with 75609c057edc9fb6694dec1ce5a79d38bdd9e8c2af3bd09f0c71e97ada058df7 not found: ID does not exist" Sep 30 11:42:58 crc kubenswrapper[4730]: I0930 11:42:58.065840 4730 scope.go:117] "RemoveContainer" containerID="dcfcc7e9ab840044e61cd0d7fc8eb76af537a66ed582eb8ab15294c6b331a222" Sep 30 11:42:58 crc kubenswrapper[4730]: E0930 11:42:58.066028 4730 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"dcfcc7e9ab840044e61cd0d7fc8eb76af537a66ed582eb8ab15294c6b331a222\": container with ID starting with dcfcc7e9ab840044e61cd0d7fc8eb76af537a66ed582eb8ab15294c6b331a222 not found: ID does not exist" containerID="dcfcc7e9ab840044e61cd0d7fc8eb76af537a66ed582eb8ab15294c6b331a222" Sep 30 11:42:58 crc kubenswrapper[4730]: I0930 11:42:58.066047 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dcfcc7e9ab840044e61cd0d7fc8eb76af537a66ed582eb8ab15294c6b331a222"} err="failed to get container status \"dcfcc7e9ab840044e61cd0d7fc8eb76af537a66ed582eb8ab15294c6b331a222\": rpc error: code = NotFound desc = could not find container \"dcfcc7e9ab840044e61cd0d7fc8eb76af537a66ed582eb8ab15294c6b331a222\": container with ID starting with dcfcc7e9ab840044e61cd0d7fc8eb76af537a66ed582eb8ab15294c6b331a222 not found: ID does not exist" Sep 30 11:42:58 crc kubenswrapper[4730]: I0930 11:42:58.404130 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="481fb01d-12be-4f70-8f95-bd250c953c74" path="/var/lib/kubelet/pods/481fb01d-12be-4f70-8f95-bd250c953c74/volumes" Sep 30 11:43:00 crc kubenswrapper[4730]: I0930 11:43:00.672102 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_watcher-decision-engine-0_a4f9bd21-5f86-4443-87be-eadb5d1c77f9/watcher-decision-engine/3.log" Sep 30 11:43:01 crc kubenswrapper[4730]: I0930 11:43:01.521562 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_watcher-api-0_4caba7a4-0751-4410-88f1-084d5289d1c6/watcher-api/0.log" Sep 30 11:43:02 crc kubenswrapper[4730]: I0930 11:43:02.337776 4730 patch_prober.go:28] interesting pod/machine-config-daemon-d4zf9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 11:43:02 crc kubenswrapper[4730]: I0930 11:43:02.338078 4730 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 11:43:02 crc kubenswrapper[4730]: I0930 11:43:02.338132 4730 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" Sep 30 11:43:02 crc kubenswrapper[4730]: I0930 11:43:02.339079 4730 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"f8da3276b1fecb6794ec8f0d4d10c4ec11116640fed8ffd3e1e680a043c05017"} pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 11:43:02 crc kubenswrapper[4730]: I0930 11:43:02.339145 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerName="machine-config-daemon" containerID="cri-o://f8da3276b1fecb6794ec8f0d4d10c4ec11116640fed8ffd3e1e680a043c05017" gracePeriod=600 Sep 30 11:43:03 crc kubenswrapper[4730]: I0930 11:43:03.059690 4730 generic.go:334] "Generic (PLEG): container finished" 
podID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerID="f8da3276b1fecb6794ec8f0d4d10c4ec11116640fed8ffd3e1e680a043c05017" exitCode=0 Sep 30 11:43:03 crc kubenswrapper[4730]: I0930 11:43:03.059765 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" event={"ID":"95bd4436-8399-478d-9552-c9ba5ae8f327","Type":"ContainerDied","Data":"f8da3276b1fecb6794ec8f0d4d10c4ec11116640fed8ffd3e1e680a043c05017"} Sep 30 11:43:03 crc kubenswrapper[4730]: I0930 11:43:03.060108 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" event={"ID":"95bd4436-8399-478d-9552-c9ba5ae8f327","Type":"ContainerStarted","Data":"c20b9afa81423828ca193928fc6b5614b2ac2701cff18acc7533a75680cd948d"} Sep 30 11:43:03 crc kubenswrapper[4730]: I0930 11:43:03.060131 4730 scope.go:117] "RemoveContainer" containerID="1ca4e75765a6b0ce8ace2aa5038325030430c2be92cc16d25c232bed60293e3b" Sep 30 11:43:05 crc kubenswrapper[4730]: I0930 11:43:05.986199 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_c7ad8423-527a-4195-8e63-d04e2bed66c9/memcached/0.log" Sep 30 11:43:13 crc kubenswrapper[4730]: I0930 11:43:13.145491 4730 generic.go:334] "Generic (PLEG): container finished" podID="fd263d56-6ba6-4190-a317-f226ec12d3e9" containerID="0d8f899c7563130dc54902c438f300c84b40731e29fd135d19b985f619a43422" exitCode=0 Sep 30 11:43:13 crc kubenswrapper[4730]: I0930 11:43:13.145572 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-jbr8l/crc-debug-fhjl7" event={"ID":"fd263d56-6ba6-4190-a317-f226ec12d3e9","Type":"ContainerDied","Data":"0d8f899c7563130dc54902c438f300c84b40731e29fd135d19b985f619a43422"} Sep 30 11:43:14 crc kubenswrapper[4730]: I0930 11:43:14.270605 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-jbr8l/crc-debug-fhjl7" Sep 30 11:43:14 crc kubenswrapper[4730]: I0930 11:43:14.305960 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-jbr8l/crc-debug-fhjl7"] Sep 30 11:43:14 crc kubenswrapper[4730]: I0930 11:43:14.315154 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-jbr8l/crc-debug-fhjl7"] Sep 30 11:43:14 crc kubenswrapper[4730]: I0930 11:43:14.387228 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/fd263d56-6ba6-4190-a317-f226ec12d3e9-host\") pod \"fd263d56-6ba6-4190-a317-f226ec12d3e9\" (UID: \"fd263d56-6ba6-4190-a317-f226ec12d3e9\") " Sep 30 11:43:14 crc kubenswrapper[4730]: I0930 11:43:14.387335 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/fd263d56-6ba6-4190-a317-f226ec12d3e9-host" (OuterVolumeSpecName: "host") pod "fd263d56-6ba6-4190-a317-f226ec12d3e9" (UID: "fd263d56-6ba6-4190-a317-f226ec12d3e9"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 11:43:14 crc kubenswrapper[4730]: I0930 11:43:14.387395 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lcsfs\" (UniqueName: \"kubernetes.io/projected/fd263d56-6ba6-4190-a317-f226ec12d3e9-kube-api-access-lcsfs\") pod \"fd263d56-6ba6-4190-a317-f226ec12d3e9\" (UID: \"fd263d56-6ba6-4190-a317-f226ec12d3e9\") " Sep 30 11:43:14 crc kubenswrapper[4730]: I0930 11:43:14.387973 4730 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/fd263d56-6ba6-4190-a317-f226ec12d3e9-host\") on node \"crc\" DevicePath \"\"" Sep 30 11:43:14 crc kubenswrapper[4730]: I0930 11:43:14.410438 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fd263d56-6ba6-4190-a317-f226ec12d3e9-kube-api-access-lcsfs" (OuterVolumeSpecName: "kube-api-access-lcsfs") pod "fd263d56-6ba6-4190-a317-f226ec12d3e9" (UID: "fd263d56-6ba6-4190-a317-f226ec12d3e9"). InnerVolumeSpecName "kube-api-access-lcsfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 11:43:14 crc kubenswrapper[4730]: I0930 11:43:14.489835 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lcsfs\" (UniqueName: \"kubernetes.io/projected/fd263d56-6ba6-4190-a317-f226ec12d3e9-kube-api-access-lcsfs\") on node \"crc\" DevicePath \"\"" Sep 30 11:43:15 crc kubenswrapper[4730]: I0930 11:43:15.164378 4730 scope.go:117] "RemoveContainer" containerID="0d8f899c7563130dc54902c438f300c84b40731e29fd135d19b985f619a43422" Sep 30 11:43:15 crc kubenswrapper[4730]: I0930 11:43:15.164444 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-jbr8l/crc-debug-fhjl7" Sep 30 11:43:15 crc kubenswrapper[4730]: I0930 11:43:15.505925 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-jbr8l/crc-debug-p7wqw"] Sep 30 11:43:15 crc kubenswrapper[4730]: E0930 11:43:15.506325 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="481fb01d-12be-4f70-8f95-bd250c953c74" containerName="extract-utilities" Sep 30 11:43:15 crc kubenswrapper[4730]: I0930 11:43:15.506338 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="481fb01d-12be-4f70-8f95-bd250c953c74" containerName="extract-utilities" Sep 30 11:43:15 crc kubenswrapper[4730]: E0930 11:43:15.506358 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="481fb01d-12be-4f70-8f95-bd250c953c74" containerName="extract-content" Sep 30 11:43:15 crc kubenswrapper[4730]: I0930 11:43:15.506364 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="481fb01d-12be-4f70-8f95-bd250c953c74" containerName="extract-content" Sep 30 11:43:15 crc kubenswrapper[4730]: E0930 11:43:15.506387 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="481fb01d-12be-4f70-8f95-bd250c953c74" containerName="registry-server" Sep 30 11:43:15 crc kubenswrapper[4730]: I0930 11:43:15.506393 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="481fb01d-12be-4f70-8f95-bd250c953c74" containerName="registry-server" Sep 30 11:43:15 crc kubenswrapper[4730]: E0930 11:43:15.506408 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fd263d56-6ba6-4190-a317-f226ec12d3e9" containerName="container-00" Sep 30 11:43:15 crc kubenswrapper[4730]: I0930 11:43:15.506415 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="fd263d56-6ba6-4190-a317-f226ec12d3e9" containerName="container-00" Sep 30 
11:43:15 crc kubenswrapper[4730]: I0930 11:43:15.506630 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="fd263d56-6ba6-4190-a317-f226ec12d3e9" containerName="container-00" Sep 30 11:43:15 crc kubenswrapper[4730]: I0930 11:43:15.506645 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="481fb01d-12be-4f70-8f95-bd250c953c74" containerName="registry-server" Sep 30 11:43:15 crc kubenswrapper[4730]: I0930 11:43:15.507290 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-jbr8l/crc-debug-p7wqw" Sep 30 11:43:15 crc kubenswrapper[4730]: I0930 11:43:15.509253 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-jbr8l"/"default-dockercfg-tbwfg" Sep 30 11:43:15 crc kubenswrapper[4730]: I0930 11:43:15.612600 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7r6dt\" (UniqueName: \"kubernetes.io/projected/1d6661dc-46cf-41a2-a49f-88ff5a0702af-kube-api-access-7r6dt\") pod \"crc-debug-p7wqw\" (UID: \"1d6661dc-46cf-41a2-a49f-88ff5a0702af\") " pod="openshift-must-gather-jbr8l/crc-debug-p7wqw" Sep 30 11:43:15 crc kubenswrapper[4730]: I0930 11:43:15.613157 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/1d6661dc-46cf-41a2-a49f-88ff5a0702af-host\") pod \"crc-debug-p7wqw\" (UID: \"1d6661dc-46cf-41a2-a49f-88ff5a0702af\") " pod="openshift-must-gather-jbr8l/crc-debug-p7wqw" Sep 30 11:43:15 crc kubenswrapper[4730]: I0930 11:43:15.715070 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7r6dt\" (UniqueName: \"kubernetes.io/projected/1d6661dc-46cf-41a2-a49f-88ff5a0702af-kube-api-access-7r6dt\") pod \"crc-debug-p7wqw\" (UID: \"1d6661dc-46cf-41a2-a49f-88ff5a0702af\") " pod="openshift-must-gather-jbr8l/crc-debug-p7wqw" Sep 30 11:43:15 crc kubenswrapper[4730]: I0930 11:43:15.715400 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/1d6661dc-46cf-41a2-a49f-88ff5a0702af-host\") pod \"crc-debug-p7wqw\" (UID: \"1d6661dc-46cf-41a2-a49f-88ff5a0702af\") " pod="openshift-must-gather-jbr8l/crc-debug-p7wqw" Sep 30 11:43:15 crc kubenswrapper[4730]: I0930 11:43:15.715563 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/1d6661dc-46cf-41a2-a49f-88ff5a0702af-host\") pod \"crc-debug-p7wqw\" (UID: \"1d6661dc-46cf-41a2-a49f-88ff5a0702af\") " pod="openshift-must-gather-jbr8l/crc-debug-p7wqw" Sep 30 11:43:15 crc kubenswrapper[4730]: I0930 11:43:15.732330 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7r6dt\" (UniqueName: \"kubernetes.io/projected/1d6661dc-46cf-41a2-a49f-88ff5a0702af-kube-api-access-7r6dt\") pod \"crc-debug-p7wqw\" (UID: \"1d6661dc-46cf-41a2-a49f-88ff5a0702af\") " pod="openshift-must-gather-jbr8l/crc-debug-p7wqw" Sep 30 11:43:15 crc kubenswrapper[4730]: I0930 11:43:15.826026 4730 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-jbr8l/crc-debug-p7wqw" Sep 30 11:43:15 crc kubenswrapper[4730]: W0930 11:43:15.862855 4730 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1d6661dc_46cf_41a2_a49f_88ff5a0702af.slice/crio-9eb5504230b1a81dae3f9956804386c0e8d0b339188cbac09aa6bee5e5669a0f WatchSource:0}: Error finding container 9eb5504230b1a81dae3f9956804386c0e8d0b339188cbac09aa6bee5e5669a0f: Status 404 returned error can't find the container with id 9eb5504230b1a81dae3f9956804386c0e8d0b339188cbac09aa6bee5e5669a0f Sep 30 11:43:16 crc kubenswrapper[4730]: I0930 11:43:16.179663 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-jbr8l/crc-debug-p7wqw" event={"ID":"1d6661dc-46cf-41a2-a49f-88ff5a0702af","Type":"ContainerStarted","Data":"a2b886807c94efa2f9e467fa8f707ba6bd422ad06e9ff4869d70f44ac7cef7da"} Sep 30 11:43:16 crc kubenswrapper[4730]: I0930 11:43:16.179756 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-jbr8l/crc-debug-p7wqw" event={"ID":"1d6661dc-46cf-41a2-a49f-88ff5a0702af","Type":"ContainerStarted","Data":"9eb5504230b1a81dae3f9956804386c0e8d0b339188cbac09aa6bee5e5669a0f"} Sep 30 11:43:16 crc kubenswrapper[4730]: I0930 11:43:16.196698 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-jbr8l/crc-debug-p7wqw" podStartSLOduration=1.196675781 podStartE2EDuration="1.196675781s" podCreationTimestamp="2025-09-30 11:43:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 11:43:16.194307189 +0000 UTC m=+6840.527567182" watchObservedRunningTime="2025-09-30 11:43:16.196675781 +0000 UTC m=+6840.529935774" Sep 30 11:43:16 crc kubenswrapper[4730]: I0930 11:43:16.397709 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fd263d56-6ba6-4190-a317-f226ec12d3e9" path="/var/lib/kubelet/pods/fd263d56-6ba6-4190-a317-f226ec12d3e9/volumes" Sep 30 11:43:17 crc kubenswrapper[4730]: I0930 11:43:17.188866 4730 generic.go:334] "Generic (PLEG): container finished" podID="1d6661dc-46cf-41a2-a49f-88ff5a0702af" containerID="a2b886807c94efa2f9e467fa8f707ba6bd422ad06e9ff4869d70f44ac7cef7da" exitCode=0 Sep 30 11:43:17 crc kubenswrapper[4730]: I0930 11:43:17.188912 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-jbr8l/crc-debug-p7wqw" event={"ID":"1d6661dc-46cf-41a2-a49f-88ff5a0702af","Type":"ContainerDied","Data":"a2b886807c94efa2f9e467fa8f707ba6bd422ad06e9ff4869d70f44ac7cef7da"} Sep 30 11:43:18 crc kubenswrapper[4730]: I0930 11:43:18.298665 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-jbr8l/crc-debug-p7wqw" Sep 30 11:43:18 crc kubenswrapper[4730]: I0930 11:43:18.358600 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7r6dt\" (UniqueName: \"kubernetes.io/projected/1d6661dc-46cf-41a2-a49f-88ff5a0702af-kube-api-access-7r6dt\") pod \"1d6661dc-46cf-41a2-a49f-88ff5a0702af\" (UID: \"1d6661dc-46cf-41a2-a49f-88ff5a0702af\") " Sep 30 11:43:18 crc kubenswrapper[4730]: I0930 11:43:18.358770 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/1d6661dc-46cf-41a2-a49f-88ff5a0702af-host\") pod \"1d6661dc-46cf-41a2-a49f-88ff5a0702af\" (UID: \"1d6661dc-46cf-41a2-a49f-88ff5a0702af\") " Sep 30 11:43:18 crc kubenswrapper[4730]: I0930 11:43:18.358900 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1d6661dc-46cf-41a2-a49f-88ff5a0702af-host" (OuterVolumeSpecName: "host") pod "1d6661dc-46cf-41a2-a49f-88ff5a0702af" (UID: "1d6661dc-46cf-41a2-a49f-88ff5a0702af"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 11:43:18 crc kubenswrapper[4730]: I0930 11:43:18.359288 4730 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/1d6661dc-46cf-41a2-a49f-88ff5a0702af-host\") on node \"crc\" DevicePath \"\"" Sep 30 11:43:18 crc kubenswrapper[4730]: I0930 11:43:18.364134 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d6661dc-46cf-41a2-a49f-88ff5a0702af-kube-api-access-7r6dt" (OuterVolumeSpecName: "kube-api-access-7r6dt") pod "1d6661dc-46cf-41a2-a49f-88ff5a0702af" (UID: "1d6661dc-46cf-41a2-a49f-88ff5a0702af"). InnerVolumeSpecName "kube-api-access-7r6dt". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 11:43:18 crc kubenswrapper[4730]: I0930 11:43:18.460824 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7r6dt\" (UniqueName: \"kubernetes.io/projected/1d6661dc-46cf-41a2-a49f-88ff5a0702af-kube-api-access-7r6dt\") on node \"crc\" DevicePath \"\"" Sep 30 11:43:19 crc kubenswrapper[4730]: I0930 11:43:19.214408 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-jbr8l/crc-debug-p7wqw" event={"ID":"1d6661dc-46cf-41a2-a49f-88ff5a0702af","Type":"ContainerDied","Data":"9eb5504230b1a81dae3f9956804386c0e8d0b339188cbac09aa6bee5e5669a0f"} Sep 30 11:43:19 crc kubenswrapper[4730]: I0930 11:43:19.214464 4730 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9eb5504230b1a81dae3f9956804386c0e8d0b339188cbac09aa6bee5e5669a0f" Sep 30 11:43:19 crc kubenswrapper[4730]: I0930 11:43:19.214559 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-jbr8l/crc-debug-p7wqw" Sep 30 11:43:25 crc kubenswrapper[4730]: I0930 11:43:25.789150 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-jbr8l/crc-debug-p7wqw"] Sep 30 11:43:25 crc kubenswrapper[4730]: I0930 11:43:25.798572 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-jbr8l/crc-debug-p7wqw"] Sep 30 11:43:26 crc kubenswrapper[4730]: I0930 11:43:26.395759 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d6661dc-46cf-41a2-a49f-88ff5a0702af" path="/var/lib/kubelet/pods/1d6661dc-46cf-41a2-a49f-88ff5a0702af/volumes" Sep 30 11:43:26 crc kubenswrapper[4730]: I0930 11:43:26.960357 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-jbr8l/crc-debug-wwsbb"] Sep 30 11:43:26 crc kubenswrapper[4730]: E0930 11:43:26.960856 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1d6661dc-46cf-41a2-a49f-88ff5a0702af" containerName="container-00" Sep 30 11:43:26 crc kubenswrapper[4730]: I0930 11:43:26.960870 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="1d6661dc-46cf-41a2-a49f-88ff5a0702af" containerName="container-00" Sep 30 11:43:26 crc kubenswrapper[4730]: I0930 11:43:26.961106 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="1d6661dc-46cf-41a2-a49f-88ff5a0702af" containerName="container-00" Sep 30 11:43:26 crc kubenswrapper[4730]: I0930 11:43:26.961956 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-jbr8l/crc-debug-wwsbb" Sep 30 11:43:26 crc kubenswrapper[4730]: I0930 11:43:26.963889 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-jbr8l"/"default-dockercfg-tbwfg" Sep 30 11:43:27 crc kubenswrapper[4730]: I0930 11:43:27.033126 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/dafbcb33-c25b-4b4e-91aa-8926214b0dae-host\") pod \"crc-debug-wwsbb\" (UID: \"dafbcb33-c25b-4b4e-91aa-8926214b0dae\") " pod="openshift-must-gather-jbr8l/crc-debug-wwsbb" Sep 30 11:43:27 crc kubenswrapper[4730]: I0930 11:43:27.033208 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-97vj6\" (UniqueName: \"kubernetes.io/projected/dafbcb33-c25b-4b4e-91aa-8926214b0dae-kube-api-access-97vj6\") pod \"crc-debug-wwsbb\" (UID: \"dafbcb33-c25b-4b4e-91aa-8926214b0dae\") " pod="openshift-must-gather-jbr8l/crc-debug-wwsbb" Sep 30 11:43:27 crc kubenswrapper[4730]: I0930 11:43:27.135026 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/dafbcb33-c25b-4b4e-91aa-8926214b0dae-host\") pod \"crc-debug-wwsbb\" (UID: \"dafbcb33-c25b-4b4e-91aa-8926214b0dae\") " pod="openshift-must-gather-jbr8l/crc-debug-wwsbb" Sep 30 11:43:27 crc kubenswrapper[4730]: I0930 11:43:27.135100 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-97vj6\" (UniqueName: \"kubernetes.io/projected/dafbcb33-c25b-4b4e-91aa-8926214b0dae-kube-api-access-97vj6\") pod \"crc-debug-wwsbb\" (UID: \"dafbcb33-c25b-4b4e-91aa-8926214b0dae\") " pod="openshift-must-gather-jbr8l/crc-debug-wwsbb" Sep 30 11:43:27 crc kubenswrapper[4730]: I0930 11:43:27.135110 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: 
\"kubernetes.io/host-path/dafbcb33-c25b-4b4e-91aa-8926214b0dae-host\") pod \"crc-debug-wwsbb\" (UID: \"dafbcb33-c25b-4b4e-91aa-8926214b0dae\") " pod="openshift-must-gather-jbr8l/crc-debug-wwsbb" Sep 30 11:43:27 crc kubenswrapper[4730]: I0930 11:43:27.154353 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-97vj6\" (UniqueName: \"kubernetes.io/projected/dafbcb33-c25b-4b4e-91aa-8926214b0dae-kube-api-access-97vj6\") pod \"crc-debug-wwsbb\" (UID: \"dafbcb33-c25b-4b4e-91aa-8926214b0dae\") " pod="openshift-must-gather-jbr8l/crc-debug-wwsbb" Sep 30 11:43:27 crc kubenswrapper[4730]: I0930 11:43:27.282314 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-jbr8l/crc-debug-wwsbb" Sep 30 11:43:28 crc kubenswrapper[4730]: I0930 11:43:28.302492 4730 generic.go:334] "Generic (PLEG): container finished" podID="dafbcb33-c25b-4b4e-91aa-8926214b0dae" containerID="ee3cf2642b6cb466b2fc121407065b46786eb2e443fa98f72dc858f91b5267f7" exitCode=0 Sep 30 11:43:28 crc kubenswrapper[4730]: I0930 11:43:28.302565 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-jbr8l/crc-debug-wwsbb" event={"ID":"dafbcb33-c25b-4b4e-91aa-8926214b0dae","Type":"ContainerDied","Data":"ee3cf2642b6cb466b2fc121407065b46786eb2e443fa98f72dc858f91b5267f7"} Sep 30 11:43:28 crc kubenswrapper[4730]: I0930 11:43:28.302891 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-jbr8l/crc-debug-wwsbb" event={"ID":"dafbcb33-c25b-4b4e-91aa-8926214b0dae","Type":"ContainerStarted","Data":"89c0f1b78747c09efb002acd1d20c06c2ced1b7109471aa44ea60a618ab5e401"} Sep 30 11:43:28 crc kubenswrapper[4730]: I0930 11:43:28.349537 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-jbr8l/crc-debug-wwsbb"] Sep 30 11:43:28 crc kubenswrapper[4730]: I0930 11:43:28.358062 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-jbr8l/crc-debug-wwsbb"] Sep 30 11:43:29 crc kubenswrapper[4730]: I0930 11:43:29.427850 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-jbr8l/crc-debug-wwsbb" Sep 30 11:43:29 crc kubenswrapper[4730]: I0930 11:43:29.481768 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/dafbcb33-c25b-4b4e-91aa-8926214b0dae-host\") pod \"dafbcb33-c25b-4b4e-91aa-8926214b0dae\" (UID: \"dafbcb33-c25b-4b4e-91aa-8926214b0dae\") " Sep 30 11:43:29 crc kubenswrapper[4730]: I0930 11:43:29.481898 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/dafbcb33-c25b-4b4e-91aa-8926214b0dae-host" (OuterVolumeSpecName: "host") pod "dafbcb33-c25b-4b4e-91aa-8926214b0dae" (UID: "dafbcb33-c25b-4b4e-91aa-8926214b0dae"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 11:43:29 crc kubenswrapper[4730]: I0930 11:43:29.482269 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-97vj6\" (UniqueName: \"kubernetes.io/projected/dafbcb33-c25b-4b4e-91aa-8926214b0dae-kube-api-access-97vj6\") pod \"dafbcb33-c25b-4b4e-91aa-8926214b0dae\" (UID: \"dafbcb33-c25b-4b4e-91aa-8926214b0dae\") " Sep 30 11:43:29 crc kubenswrapper[4730]: I0930 11:43:29.484431 4730 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/dafbcb33-c25b-4b4e-91aa-8926214b0dae-host\") on node \"crc\" DevicePath \"\"" Sep 30 11:43:29 crc kubenswrapper[4730]: I0930 11:43:29.487791 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dafbcb33-c25b-4b4e-91aa-8926214b0dae-kube-api-access-97vj6" (OuterVolumeSpecName: "kube-api-access-97vj6") pod "dafbcb33-c25b-4b4e-91aa-8926214b0dae" (UID: "dafbcb33-c25b-4b4e-91aa-8926214b0dae"). InnerVolumeSpecName "kube-api-access-97vj6". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 11:43:29 crc kubenswrapper[4730]: I0930 11:43:29.586906 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-97vj6\" (UniqueName: \"kubernetes.io/projected/dafbcb33-c25b-4b4e-91aa-8926214b0dae-kube-api-access-97vj6\") on node \"crc\" DevicePath \"\"" Sep 30 11:43:29 crc kubenswrapper[4730]: I0930 11:43:29.884494 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-6ff8b75857-nk8jc_978da32e-9bbe-453d-ba3f-32a89f23550e/kube-rbac-proxy/0.log" Sep 30 11:43:29 crc kubenswrapper[4730]: I0930 11:43:29.937554 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-6ff8b75857-nk8jc_978da32e-9bbe-453d-ba3f-32a89f23550e/manager/0.log" Sep 30 11:43:30 crc kubenswrapper[4730]: I0930 11:43:30.059282 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-644bddb6d8-qx68r_04589829-1e63-438e-b6e8-bdaa6f5ebc19/kube-rbac-proxy/0.log" Sep 30 11:43:30 crc kubenswrapper[4730]: I0930 11:43:30.115165 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-644bddb6d8-qx68r_04589829-1e63-438e-b6e8-bdaa6f5ebc19/manager/0.log" Sep 30 11:43:30 crc kubenswrapper[4730]: I0930 11:43:30.256479 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_d6969747c50f23c1f614e69e237b778b2a9a353a3fb947849eaf84792f4r9zp_e7753738-376e-4cbd-ad5b-42b0bc98a1a8/util/0.log" Sep 30 11:43:30 crc kubenswrapper[4730]: I0930 11:43:30.320078 4730 scope.go:117] "RemoveContainer" containerID="ee3cf2642b6cb466b2fc121407065b46786eb2e443fa98f72dc858f91b5267f7" Sep 30 11:43:30 crc kubenswrapper[4730]: I0930 11:43:30.320219 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-jbr8l/crc-debug-wwsbb" Sep 30 11:43:30 crc kubenswrapper[4730]: I0930 11:43:30.374445 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_d6969747c50f23c1f614e69e237b778b2a9a353a3fb947849eaf84792f4r9zp_e7753738-376e-4cbd-ad5b-42b0bc98a1a8/util/0.log" Sep 30 11:43:30 crc kubenswrapper[4730]: I0930 11:43:30.393328 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dafbcb33-c25b-4b4e-91aa-8926214b0dae" path="/var/lib/kubelet/pods/dafbcb33-c25b-4b4e-91aa-8926214b0dae/volumes" Sep 30 11:43:30 crc kubenswrapper[4730]: I0930 11:43:30.408074 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_d6969747c50f23c1f614e69e237b778b2a9a353a3fb947849eaf84792f4r9zp_e7753738-376e-4cbd-ad5b-42b0bc98a1a8/pull/0.log" Sep 30 11:43:30 crc kubenswrapper[4730]: I0930 11:43:30.426360 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_d6969747c50f23c1f614e69e237b778b2a9a353a3fb947849eaf84792f4r9zp_e7753738-376e-4cbd-ad5b-42b0bc98a1a8/pull/0.log" Sep 30 11:43:30 crc kubenswrapper[4730]: I0930 11:43:30.573096 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_d6969747c50f23c1f614e69e237b778b2a9a353a3fb947849eaf84792f4r9zp_e7753738-376e-4cbd-ad5b-42b0bc98a1a8/pull/0.log" Sep 30 11:43:30 crc kubenswrapper[4730]: I0930 11:43:30.584492 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_d6969747c50f23c1f614e69e237b778b2a9a353a3fb947849eaf84792f4r9zp_e7753738-376e-4cbd-ad5b-42b0bc98a1a8/util/0.log" Sep 30 11:43:30 crc kubenswrapper[4730]: I0930 11:43:30.604394 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_d6969747c50f23c1f614e69e237b778b2a9a353a3fb947849eaf84792f4r9zp_e7753738-376e-4cbd-ad5b-42b0bc98a1a8/extract/0.log" Sep 30 11:43:30 crc kubenswrapper[4730]: I0930 11:43:30.782182 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-84f4f7b77b-tglx9_2758692b-990d-4330-9765-22614cd379a0/manager/0.log" Sep 30 11:43:30 crc kubenswrapper[4730]: I0930 11:43:30.787182 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-84f4f7b77b-tglx9_2758692b-990d-4330-9765-22614cd379a0/kube-rbac-proxy/0.log" Sep 30 11:43:30 crc kubenswrapper[4730]: I0930 11:43:30.833116 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-84958c4d49-jdfxz_0f748696-3e59-4b53-a5d2-1dce4b0b6a3a/kube-rbac-proxy/0.log" Sep 30 11:43:31 crc kubenswrapper[4730]: I0930 11:43:31.008179 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5d889d78cf-p957g_419a8cdd-e50e-42f8-b913-61214be0a9a5/kube-rbac-proxy/0.log" Sep 30 11:43:31 crc kubenswrapper[4730]: I0930 11:43:31.039122 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-84958c4d49-jdfxz_0f748696-3e59-4b53-a5d2-1dce4b0b6a3a/manager/0.log" Sep 30 11:43:31 crc kubenswrapper[4730]: I0930 11:43:31.048654 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5d889d78cf-p957g_419a8cdd-e50e-42f8-b913-61214be0a9a5/manager/0.log" Sep 30 11:43:31 crc kubenswrapper[4730]: I0930 11:43:31.228758 4730 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-9f4696d94-mwgsf_db4dd5c5-bcc5-4782-acf0-42d686edd287/manager/0.log" Sep 30 11:43:31 crc kubenswrapper[4730]: I0930 11:43:31.235647 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-9f4696d94-mwgsf_db4dd5c5-bcc5-4782-acf0-42d686edd287/kube-rbac-proxy/0.log" Sep 30 11:43:31 crc kubenswrapper[4730]: I0930 11:43:31.412356 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-7d857cc749-rxq94_4201b1a7-e458-49b2-9536-91e6db49ea36/kube-rbac-proxy/0.log" Sep 30 11:43:31 crc kubenswrapper[4730]: I0930 11:43:31.420050 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-7975b88857-72mvr_3cc5b4f8-09e7-44a7-aa40-f173ad8fb157/kube-rbac-proxy/0.log" Sep 30 11:43:31 crc kubenswrapper[4730]: I0930 11:43:31.618874 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-7d857cc749-rxq94_4201b1a7-e458-49b2-9536-91e6db49ea36/manager/0.log" Sep 30 11:43:31 crc kubenswrapper[4730]: I0930 11:43:31.652791 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-5bd55b4bff-qsgdr_c24a4e1a-10db-44f2-9de6-16f4081a5609/kube-rbac-proxy/0.log" Sep 30 11:43:31 crc kubenswrapper[4730]: I0930 11:43:31.679178 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-7975b88857-72mvr_3cc5b4f8-09e7-44a7-aa40-f173ad8fb157/manager/0.log" Sep 30 11:43:31 crc kubenswrapper[4730]: I0930 11:43:31.833847 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-5bd55b4bff-qsgdr_c24a4e1a-10db-44f2-9de6-16f4081a5609/manager/0.log" Sep 30 11:43:31 crc kubenswrapper[4730]: I0930 11:43:31.860836 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-6d68dbc695-bgczv_27424124-82bf-42fa-a77b-fdbd44f5c24b/manager/0.log" Sep 30 11:43:31 crc kubenswrapper[4730]: I0930 11:43:31.863806 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-6d68dbc695-bgczv_27424124-82bf-42fa-a77b-fdbd44f5c24b/kube-rbac-proxy/0.log" Sep 30 11:43:32 crc kubenswrapper[4730]: I0930 11:43:32.000077 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-88c7-pr4r7_81c8b722-d28f-42d4-8bc0-b82b9eb34500/kube-rbac-proxy/0.log" Sep 30 11:43:32 crc kubenswrapper[4730]: I0930 11:43:32.045642 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-88c7-pr4r7_81c8b722-d28f-42d4-8bc0-b82b9eb34500/manager/0.log" Sep 30 11:43:32 crc kubenswrapper[4730]: I0930 11:43:32.180996 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-64d7b59854-6v77r_8c6c5d61-fd4e-4b83-9b60-7681c6fc19f3/kube-rbac-proxy/0.log" Sep 30 11:43:32 crc kubenswrapper[4730]: I0930 11:43:32.199078 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-64d7b59854-6v77r_8c6c5d61-fd4e-4b83-9b60-7681c6fc19f3/manager/0.log" Sep 30 11:43:32 crc kubenswrapper[4730]: I0930 11:43:32.324656 4730 log.go:25] "Finished parsing 
log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-c7c776c96-z5tb9_7ed8122e-042b-4574-9522-99557d55eedc/kube-rbac-proxy/0.log" Sep 30 11:43:32 crc kubenswrapper[4730]: I0930 11:43:32.450980 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-c7c776c96-z5tb9_7ed8122e-042b-4574-9522-99557d55eedc/manager/0.log" Sep 30 11:43:32 crc kubenswrapper[4730]: I0930 11:43:32.484778 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-76fcc6dc7c-g7zc5_0f10a085-8ce3-407b-a2ec-b6fabc38bc9f/kube-rbac-proxy/0.log" Sep 30 11:43:32 crc kubenswrapper[4730]: I0930 11:43:32.543937 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-76fcc6dc7c-g7zc5_0f10a085-8ce3-407b-a2ec-b6fabc38bc9f/manager/0.log" Sep 30 11:43:32 crc kubenswrapper[4730]: I0930 11:43:32.687698 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-6d776955-b5jlg_b6969510-2750-4466-b064-7cb67a4acf7e/kube-rbac-proxy/0.log" Sep 30 11:43:32 crc kubenswrapper[4730]: I0930 11:43:32.719524 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-6d776955-b5jlg_b6969510-2750-4466-b064-7cb67a4acf7e/manager/0.log" Sep 30 11:43:32 crc kubenswrapper[4730]: I0930 11:43:32.875232 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-79b5487686-nzxks_dc289df2-abec-4f24-a873-82523204cb2b/kube-rbac-proxy/0.log" Sep 30 11:43:32 crc kubenswrapper[4730]: I0930 11:43:32.962361 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-f85b56ffc-k4dxb_020b365e-9b85-4464-b95f-47e12c8812c5/kube-rbac-proxy/0.log" Sep 30 11:43:33 crc kubenswrapper[4730]: I0930 11:43:33.232103 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-b5xnk_6c48593a-c4fc-49ae-a91b-4a5f57667d3f/registry-server/0.log" Sep 30 11:43:33 crc kubenswrapper[4730]: I0930 11:43:33.295452 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-f85b56ffc-k4dxb_020b365e-9b85-4464-b95f-47e12c8812c5/operator/0.log" Sep 30 11:43:33 crc kubenswrapper[4730]: I0930 11:43:33.514173 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-9976ff44c-tsgfd_f4fe55ab-8eac-4f9a-9f5f-c70a91fd261e/kube-rbac-proxy/0.log" Sep 30 11:43:33 crc kubenswrapper[4730]: I0930 11:43:33.524831 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-9976ff44c-tsgfd_f4fe55ab-8eac-4f9a-9f5f-c70a91fd261e/manager/0.log" Sep 30 11:43:33 crc kubenswrapper[4730]: I0930 11:43:33.638111 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-589c58c6c-zbmbb_28fcf9ea-7f63-4add-bb31-99af57fcce2c/kube-rbac-proxy/0.log" Sep 30 11:43:33 crc kubenswrapper[4730]: I0930 11:43:33.800660 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-79d8469568-d4vnn_f26267a9-08cf-4ff8-8fab-d1bfe01dbd65/operator/0.log" Sep 30 11:43:33 crc kubenswrapper[4730]: I0930 11:43:33.806938 4730 
log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-589c58c6c-zbmbb_28fcf9ea-7f63-4add-bb31-99af57fcce2c/manager/0.log" Sep 30 11:43:34 crc kubenswrapper[4730]: I0930 11:43:34.078917 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-bc7dc7bd9-dsvl6_a081f8cc-4fb8-457c-84de-2c7ba2c84821/manager/0.log" Sep 30 11:43:34 crc kubenswrapper[4730]: I0930 11:43:34.085342 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-bc7dc7bd9-dsvl6_a081f8cc-4fb8-457c-84de-2c7ba2c84821/kube-rbac-proxy/0.log" Sep 30 11:43:34 crc kubenswrapper[4730]: I0930 11:43:34.196053 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-b8d54b5d7-nwvfh_e6074e1b-4192-43a7-b391-f4112d2486bf/kube-rbac-proxy/0.log" Sep 30 11:43:34 crc kubenswrapper[4730]: I0930 11:43:34.241960 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-79b5487686-nzxks_dc289df2-abec-4f24-a873-82523204cb2b/manager/0.log" Sep 30 11:43:34 crc kubenswrapper[4730]: I0930 11:43:34.289067 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-f66b554c6-djzc5_f3fcce5a-2080-44f6-971c-d1bda3dd0fe0/kube-rbac-proxy/0.log" Sep 30 11:43:34 crc kubenswrapper[4730]: I0930 11:43:34.371763 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-f66b554c6-djzc5_f3fcce5a-2080-44f6-971c-d1bda3dd0fe0/manager/0.log" Sep 30 11:43:34 crc kubenswrapper[4730]: I0930 11:43:34.478300 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-6c4b8dd4dc-tbxsc_ba719558-c698-41e9-8b5e-a3449a6f9a7c/kube-rbac-proxy/0.log" Sep 30 11:43:34 crc kubenswrapper[4730]: I0930 11:43:34.561327 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-b8d54b5d7-nwvfh_e6074e1b-4192-43a7-b391-f4112d2486bf/manager/0.log" Sep 30 11:43:34 crc kubenswrapper[4730]: I0930 11:43:34.578675 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-6c4b8dd4dc-tbxsc_ba719558-c698-41e9-8b5e-a3449a6f9a7c/manager/0.log" Sep 30 11:43:49 crc kubenswrapper[4730]: I0930 11:43:49.404328 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-27l2q_eb3285d0-7f84-46d0-9c21-136e077b813a/control-plane-machine-set-operator/0.log" Sep 30 11:43:49 crc kubenswrapper[4730]: I0930 11:43:49.571478 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-r86xc_789ee928-afa8-424d-8810-6a04b2a7d5d6/kube-rbac-proxy/0.log" Sep 30 11:43:49 crc kubenswrapper[4730]: I0930 11:43:49.586640 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-r86xc_789ee928-afa8-424d-8810-6a04b2a7d5d6/machine-api-operator/0.log" Sep 30 11:44:01 crc kubenswrapper[4730]: I0930 11:44:01.167644 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-5b446d88c5-k5vht_66889f3c-938c-46e9-a430-801bb731b19e/cert-manager-controller/0.log" Sep 30 11:44:01 crc kubenswrapper[4730]: I0930 11:44:01.293346 
4730 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7f985d654d-9lmxl_5461a244-b4c4-48fb-9590-ebc310a13761/cert-manager-cainjector/0.log" Sep 30 11:44:01 crc kubenswrapper[4730]: I0930 11:44:01.327267 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-5655c58dd6-p92bh_687b4645-7901-4987-adde-e3db6b502a52/cert-manager-webhook/0.log" Sep 30 11:44:13 crc kubenswrapper[4730]: I0930 11:44:13.289204 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-864bb6dfb5-2tzsp_63a29940-b6e3-47cf-b71b-f010806ae889/nmstate-console-plugin/0.log" Sep 30 11:44:13 crc kubenswrapper[4730]: I0930 11:44:13.352855 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-5j4jt_a4994298-b2c8-4c02-9196-58d0cd805da1/nmstate-handler/0.log" Sep 30 11:44:13 crc kubenswrapper[4730]: I0930 11:44:13.470755 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-58fcddf996-5pc6s_24231826-1571-4b73-ae50-bc95035399b2/kube-rbac-proxy/0.log" Sep 30 11:44:13 crc kubenswrapper[4730]: I0930 11:44:13.471508 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-58fcddf996-5pc6s_24231826-1571-4b73-ae50-bc95035399b2/nmstate-metrics/0.log" Sep 30 11:44:13 crc kubenswrapper[4730]: I0930 11:44:13.586770 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-5d6f6cfd66-krpvk_4d5a580e-a60b-4854-838b-f51fb9e32536/nmstate-operator/0.log" Sep 30 11:44:13 crc kubenswrapper[4730]: I0930 11:44:13.654139 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-6d689559c5-6znsv_05ca167b-1d36-4bd0-82f0-07b82f5e9a7d/nmstate-webhook/0.log" Sep 30 11:44:27 crc kubenswrapper[4730]: I0930 11:44:27.361534 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-5d688f5ffc-hp2xq_b5cc3ceb-ad9c-4b2d-b272-913a1856afcc/kube-rbac-proxy/0.log" Sep 30 11:44:27 crc kubenswrapper[4730]: I0930 11:44:27.560056 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-5d688f5ffc-hp2xq_b5cc3ceb-ad9c-4b2d-b272-913a1856afcc/controller/0.log" Sep 30 11:44:27 crc kubenswrapper[4730]: I0930 11:44:27.614190 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-rwcp7_7718ff95-7f9a-46a9-a0f9-259e60a9f142/cp-frr-files/0.log" Sep 30 11:44:27 crc kubenswrapper[4730]: I0930 11:44:27.766686 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-rwcp7_7718ff95-7f9a-46a9-a0f9-259e60a9f142/cp-frr-files/0.log" Sep 30 11:44:27 crc kubenswrapper[4730]: I0930 11:44:27.782643 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-rwcp7_7718ff95-7f9a-46a9-a0f9-259e60a9f142/cp-reloader/0.log" Sep 30 11:44:27 crc kubenswrapper[4730]: I0930 11:44:27.796356 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-rwcp7_7718ff95-7f9a-46a9-a0f9-259e60a9f142/cp-metrics/0.log" Sep 30 11:44:27 crc kubenswrapper[4730]: I0930 11:44:27.807551 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-rwcp7_7718ff95-7f9a-46a9-a0f9-259e60a9f142/cp-reloader/0.log" Sep 30 11:44:27 crc kubenswrapper[4730]: I0930 11:44:27.950449 4730 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_frr-k8s-rwcp7_7718ff95-7f9a-46a9-a0f9-259e60a9f142/cp-frr-files/0.log" Sep 30 11:44:27 crc kubenswrapper[4730]: I0930 11:44:27.965596 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-rwcp7_7718ff95-7f9a-46a9-a0f9-259e60a9f142/cp-metrics/0.log" Sep 30 11:44:27 crc kubenswrapper[4730]: I0930 11:44:27.981324 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-rwcp7_7718ff95-7f9a-46a9-a0f9-259e60a9f142/cp-metrics/0.log" Sep 30 11:44:27 crc kubenswrapper[4730]: I0930 11:44:27.996567 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-rwcp7_7718ff95-7f9a-46a9-a0f9-259e60a9f142/cp-reloader/0.log" Sep 30 11:44:28 crc kubenswrapper[4730]: I0930 11:44:28.175698 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-rwcp7_7718ff95-7f9a-46a9-a0f9-259e60a9f142/cp-reloader/0.log" Sep 30 11:44:28 crc kubenswrapper[4730]: I0930 11:44:28.197330 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-rwcp7_7718ff95-7f9a-46a9-a0f9-259e60a9f142/cp-frr-files/0.log" Sep 30 11:44:28 crc kubenswrapper[4730]: I0930 11:44:28.207315 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-rwcp7_7718ff95-7f9a-46a9-a0f9-259e60a9f142/cp-metrics/0.log" Sep 30 11:44:28 crc kubenswrapper[4730]: I0930 11:44:28.221299 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-rwcp7_7718ff95-7f9a-46a9-a0f9-259e60a9f142/controller/0.log" Sep 30 11:44:28 crc kubenswrapper[4730]: I0930 11:44:28.347730 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-rwcp7_7718ff95-7f9a-46a9-a0f9-259e60a9f142/frr-metrics/0.log" Sep 30 11:44:28 crc kubenswrapper[4730]: I0930 11:44:28.373034 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-rwcp7_7718ff95-7f9a-46a9-a0f9-259e60a9f142/kube-rbac-proxy/0.log" Sep 30 11:44:28 crc kubenswrapper[4730]: I0930 11:44:28.444207 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-rwcp7_7718ff95-7f9a-46a9-a0f9-259e60a9f142/kube-rbac-proxy-frr/0.log" Sep 30 11:44:28 crc kubenswrapper[4730]: I0930 11:44:28.574324 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-rwcp7_7718ff95-7f9a-46a9-a0f9-259e60a9f142/reloader/0.log" Sep 30 11:44:28 crc kubenswrapper[4730]: I0930 11:44:28.644732 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-5478bdb765-w96cg_c7e58e36-b0e5-4531-9e55-bf09a14d556e/frr-k8s-webhook-server/0.log" Sep 30 11:44:28 crc kubenswrapper[4730]: I0930 11:44:28.866168 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-574c858f4-cvlx7_b7580b03-c29b-4b03-84c7-726fecd55064/manager/0.log" Sep 30 11:44:29 crc kubenswrapper[4730]: I0930 11:44:29.051182 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-666698b878-dkzwg_5c1b3278-f9f8-41c3-a42f-d789aaaba651/webhook-server/0.log" Sep 30 11:44:29 crc kubenswrapper[4730]: I0930 11:44:29.095275 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-bgftk_fac4c47c-e141-4fab-a69b-de3467d806ce/kube-rbac-proxy/0.log" Sep 30 11:44:29 crc kubenswrapper[4730]: I0930 11:44:29.755778 4730 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_speaker-bgftk_fac4c47c-e141-4fab-a69b-de3467d806ce/speaker/0.log" Sep 30 11:44:30 crc kubenswrapper[4730]: I0930 11:44:30.129658 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-rwcp7_7718ff95-7f9a-46a9-a0f9-259e60a9f142/frr/0.log" Sep 30 11:44:42 crc kubenswrapper[4730]: I0930 11:44:42.347527 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcmkqnf_d8fd2777-1d8a-4947-a1c0-686fdc13c679/util/0.log" Sep 30 11:44:42 crc kubenswrapper[4730]: I0930 11:44:42.536821 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcmkqnf_d8fd2777-1d8a-4947-a1c0-686fdc13c679/pull/0.log" Sep 30 11:44:42 crc kubenswrapper[4730]: I0930 11:44:42.558424 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcmkqnf_d8fd2777-1d8a-4947-a1c0-686fdc13c679/pull/0.log" Sep 30 11:44:42 crc kubenswrapper[4730]: I0930 11:44:42.598492 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcmkqnf_d8fd2777-1d8a-4947-a1c0-686fdc13c679/util/0.log" Sep 30 11:44:42 crc kubenswrapper[4730]: I0930 11:44:42.718533 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcmkqnf_d8fd2777-1d8a-4947-a1c0-686fdc13c679/util/0.log" Sep 30 11:44:42 crc kubenswrapper[4730]: I0930 11:44:42.723586 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcmkqnf_d8fd2777-1d8a-4947-a1c0-686fdc13c679/pull/0.log" Sep 30 11:44:42 crc kubenswrapper[4730]: I0930 11:44:42.749089 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcmkqnf_d8fd2777-1d8a-4947-a1c0-686fdc13c679/extract/0.log" Sep 30 11:44:42 crc kubenswrapper[4730]: I0930 11:44:42.898546 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d6b9pq_76182bda-e874-4b16-9a53-164f47f7ccb5/util/0.log" Sep 30 11:44:43 crc kubenswrapper[4730]: I0930 11:44:43.087935 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d6b9pq_76182bda-e874-4b16-9a53-164f47f7ccb5/util/0.log" Sep 30 11:44:43 crc kubenswrapper[4730]: I0930 11:44:43.096346 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d6b9pq_76182bda-e874-4b16-9a53-164f47f7ccb5/pull/0.log" Sep 30 11:44:43 crc kubenswrapper[4730]: I0930 11:44:43.107795 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d6b9pq_76182bda-e874-4b16-9a53-164f47f7ccb5/pull/0.log" Sep 30 11:44:43 crc kubenswrapper[4730]: I0930 11:44:43.239270 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d6b9pq_76182bda-e874-4b16-9a53-164f47f7ccb5/pull/0.log" Sep 30 11:44:43 crc kubenswrapper[4730]: I0930 11:44:43.244088 4730 log.go:25] "Finished parsing log 
file" path="/var/log/pods/openshift-marketplace_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d6b9pq_76182bda-e874-4b16-9a53-164f47f7ccb5/util/0.log" Sep 30 11:44:43 crc kubenswrapper[4730]: I0930 11:44:43.294407 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d6b9pq_76182bda-e874-4b16-9a53-164f47f7ccb5/extract/0.log" Sep 30 11:44:43 crc kubenswrapper[4730]: I0930 11:44:43.417555 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-zdlqc_a621e0c4-5687-4e20-9c31-11b5ee23f644/extract-utilities/0.log" Sep 30 11:44:43 crc kubenswrapper[4730]: I0930 11:44:43.616182 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-zdlqc_a621e0c4-5687-4e20-9c31-11b5ee23f644/extract-utilities/0.log" Sep 30 11:44:43 crc kubenswrapper[4730]: I0930 11:44:43.650213 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-zdlqc_a621e0c4-5687-4e20-9c31-11b5ee23f644/extract-content/0.log" Sep 30 11:44:43 crc kubenswrapper[4730]: I0930 11:44:43.656774 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-zdlqc_a621e0c4-5687-4e20-9c31-11b5ee23f644/extract-content/0.log" Sep 30 11:44:43 crc kubenswrapper[4730]: I0930 11:44:43.882412 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-zdlqc_a621e0c4-5687-4e20-9c31-11b5ee23f644/extract-utilities/0.log" Sep 30 11:44:43 crc kubenswrapper[4730]: I0930 11:44:43.906981 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-zdlqc_a621e0c4-5687-4e20-9c31-11b5ee23f644/extract-content/0.log" Sep 30 11:44:44 crc kubenswrapper[4730]: I0930 11:44:44.026625 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-zdlqc_a621e0c4-5687-4e20-9c31-11b5ee23f644/registry-server/0.log" Sep 30 11:44:44 crc kubenswrapper[4730]: I0930 11:44:44.086898 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-b9wqt_b0b4345a-451e-4895-be55-f2ce12708fa4/extract-utilities/0.log" Sep 30 11:44:44 crc kubenswrapper[4730]: I0930 11:44:44.248469 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-b9wqt_b0b4345a-451e-4895-be55-f2ce12708fa4/extract-content/0.log" Sep 30 11:44:44 crc kubenswrapper[4730]: I0930 11:44:44.256962 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-b9wqt_b0b4345a-451e-4895-be55-f2ce12708fa4/extract-content/0.log" Sep 30 11:44:44 crc kubenswrapper[4730]: I0930 11:44:44.301318 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-b9wqt_b0b4345a-451e-4895-be55-f2ce12708fa4/extract-utilities/0.log" Sep 30 11:44:44 crc kubenswrapper[4730]: I0930 11:44:44.441097 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-b9wqt_b0b4345a-451e-4895-be55-f2ce12708fa4/extract-utilities/0.log" Sep 30 11:44:44 crc kubenswrapper[4730]: I0930 11:44:44.458514 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-b9wqt_b0b4345a-451e-4895-be55-f2ce12708fa4/extract-content/0.log" Sep 30 11:44:44 crc kubenswrapper[4730]: I0930 
11:44:44.654069 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96r4bgv_99980a45-f5ca-428d-b285-bc4f72ff8e28/util/0.log" Sep 30 11:44:44 crc kubenswrapper[4730]: I0930 11:44:44.897533 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96r4bgv_99980a45-f5ca-428d-b285-bc4f72ff8e28/util/0.log" Sep 30 11:44:44 crc kubenswrapper[4730]: I0930 11:44:44.926160 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96r4bgv_99980a45-f5ca-428d-b285-bc4f72ff8e28/pull/0.log" Sep 30 11:44:44 crc kubenswrapper[4730]: I0930 11:44:44.983124 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96r4bgv_99980a45-f5ca-428d-b285-bc4f72ff8e28/pull/0.log" Sep 30 11:44:45 crc kubenswrapper[4730]: I0930 11:44:45.149096 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96r4bgv_99980a45-f5ca-428d-b285-bc4f72ff8e28/util/0.log" Sep 30 11:44:45 crc kubenswrapper[4730]: I0930 11:44:45.152901 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96r4bgv_99980a45-f5ca-428d-b285-bc4f72ff8e28/pull/0.log" Sep 30 11:44:45 crc kubenswrapper[4730]: I0930 11:44:45.189856 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96r4bgv_99980a45-f5ca-428d-b285-bc4f72ff8e28/extract/0.log" Sep 30 11:44:45 crc kubenswrapper[4730]: I0930 11:44:45.351296 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-b9wqt_b0b4345a-451e-4895-be55-f2ce12708fa4/registry-server/0.log" Sep 30 11:44:45 crc kubenswrapper[4730]: I0930 11:44:45.400982 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-6gt5z_a28d8376-b2f6-44da-b872-34bd96b74108/marketplace-operator/0.log" Sep 30 11:44:45 crc kubenswrapper[4730]: I0930 11:44:45.545725 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-mdh55_c426809d-752d-4149-8ece-44dceba59124/extract-utilities/0.log" Sep 30 11:44:45 crc kubenswrapper[4730]: I0930 11:44:45.675390 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-mdh55_c426809d-752d-4149-8ece-44dceba59124/extract-content/0.log" Sep 30 11:44:45 crc kubenswrapper[4730]: I0930 11:44:45.722038 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-mdh55_c426809d-752d-4149-8ece-44dceba59124/extract-content/0.log" Sep 30 11:44:45 crc kubenswrapper[4730]: I0930 11:44:45.725102 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-mdh55_c426809d-752d-4149-8ece-44dceba59124/extract-utilities/0.log" Sep 30 11:44:45 crc kubenswrapper[4730]: I0930 11:44:45.851656 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-mdh55_c426809d-752d-4149-8ece-44dceba59124/extract-utilities/0.log" Sep 30 11:44:45 crc kubenswrapper[4730]: I0930 11:44:45.906270 4730 log.go:25] "Finished parsing log 
file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-mdh55_c426809d-752d-4149-8ece-44dceba59124/extract-content/0.log" Sep 30 11:44:45 crc kubenswrapper[4730]: I0930 11:44:45.996488 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-mdh55_c426809d-752d-4149-8ece-44dceba59124/registry-server/0.log" Sep 30 11:44:46 crc kubenswrapper[4730]: I0930 11:44:46.005550 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-zxfgw_04ec1190-8a48-4c88-8034-6e43d5d27a59/extract-utilities/0.log" Sep 30 11:44:46 crc kubenswrapper[4730]: I0930 11:44:46.146748 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-zxfgw_04ec1190-8a48-4c88-8034-6e43d5d27a59/extract-utilities/0.log" Sep 30 11:44:46 crc kubenswrapper[4730]: I0930 11:44:46.165779 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-zxfgw_04ec1190-8a48-4c88-8034-6e43d5d27a59/extract-content/0.log" Sep 30 11:44:46 crc kubenswrapper[4730]: I0930 11:44:46.178983 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-zxfgw_04ec1190-8a48-4c88-8034-6e43d5d27a59/extract-content/0.log" Sep 30 11:44:46 crc kubenswrapper[4730]: I0930 11:44:46.357713 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-zxfgw_04ec1190-8a48-4c88-8034-6e43d5d27a59/extract-utilities/0.log" Sep 30 11:44:46 crc kubenswrapper[4730]: I0930 11:44:46.358734 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-zxfgw_04ec1190-8a48-4c88-8034-6e43d5d27a59/extract-content/0.log" Sep 30 11:44:46 crc kubenswrapper[4730]: I0930 11:44:46.815632 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-zxfgw_04ec1190-8a48-4c88-8034-6e43d5d27a59/registry-server/0.log" Sep 30 11:44:58 crc kubenswrapper[4730]: I0930 11:44:58.513830 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-7c8cf85677-kv7bf_ad98526d-6d03-4564-849c-5ae4d06519e2/prometheus-operator/0.log" Sep 30 11:44:58 crc kubenswrapper[4730]: I0930 11:44:58.640663 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-6b8dd4c948-b87gk_e1ad7bb1-aa06-43d2-bd4f-1f53a4bc360a/prometheus-operator-admission-webhook/0.log" Sep 30 11:44:58 crc kubenswrapper[4730]: I0930 11:44:58.688598 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-6b8dd4c948-rbcn4_5ab94ac6-ffcc-42a9-b00e-f3ef3eb5df1d/prometheus-operator-admission-webhook/0.log" Sep 30 11:44:58 crc kubenswrapper[4730]: I0930 11:44:58.816422 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_observability-operator-cc5f78dfc-b68z8_2b9be7f9-7237-4a96-b0a3-9052ab5b0eea/operator/0.log" Sep 30 11:44:58 crc kubenswrapper[4730]: I0930 11:44:58.859733 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_perses-operator-54bc95c9fb-5gr7s_1f3b0abd-5e6c-4afe-9122-606234241336/perses-operator/0.log" Sep 30 11:45:00 crc kubenswrapper[4730]: I0930 11:45:00.166757 4730 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29320545-8fvcx"] Sep 30 11:45:00 crc kubenswrapper[4730]: E0930 
11:45:00.167496 4730 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dafbcb33-c25b-4b4e-91aa-8926214b0dae" containerName="container-00" Sep 30 11:45:00 crc kubenswrapper[4730]: I0930 11:45:00.167538 4730 state_mem.go:107] "Deleted CPUSet assignment" podUID="dafbcb33-c25b-4b4e-91aa-8926214b0dae" containerName="container-00" Sep 30 11:45:00 crc kubenswrapper[4730]: I0930 11:45:00.168037 4730 memory_manager.go:354] "RemoveStaleState removing state" podUID="dafbcb33-c25b-4b4e-91aa-8926214b0dae" containerName="container-00" Sep 30 11:45:00 crc kubenswrapper[4730]: I0930 11:45:00.169500 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29320545-8fvcx" Sep 30 11:45:00 crc kubenswrapper[4730]: I0930 11:45:00.172125 4730 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Sep 30 11:45:00 crc kubenswrapper[4730]: I0930 11:45:00.172306 4730 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Sep 30 11:45:00 crc kubenswrapper[4730]: I0930 11:45:00.178169 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29320545-8fvcx"] Sep 30 11:45:00 crc kubenswrapper[4730]: I0930 11:45:00.246616 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/264d19bb-68ab-4f61-8468-14a67d87a5dc-secret-volume\") pod \"collect-profiles-29320545-8fvcx\" (UID: \"264d19bb-68ab-4f61-8468-14a67d87a5dc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320545-8fvcx" Sep 30 11:45:00 crc kubenswrapper[4730]: I0930 11:45:00.246773 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gks2j\" (UniqueName: \"kubernetes.io/projected/264d19bb-68ab-4f61-8468-14a67d87a5dc-kube-api-access-gks2j\") pod \"collect-profiles-29320545-8fvcx\" (UID: \"264d19bb-68ab-4f61-8468-14a67d87a5dc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320545-8fvcx" Sep 30 11:45:00 crc kubenswrapper[4730]: I0930 11:45:00.246903 4730 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/264d19bb-68ab-4f61-8468-14a67d87a5dc-config-volume\") pod \"collect-profiles-29320545-8fvcx\" (UID: \"264d19bb-68ab-4f61-8468-14a67d87a5dc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320545-8fvcx" Sep 30 11:45:00 crc kubenswrapper[4730]: I0930 11:45:00.349025 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/264d19bb-68ab-4f61-8468-14a67d87a5dc-secret-volume\") pod \"collect-profiles-29320545-8fvcx\" (UID: \"264d19bb-68ab-4f61-8468-14a67d87a5dc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320545-8fvcx" Sep 30 11:45:00 crc kubenswrapper[4730]: I0930 11:45:00.349190 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gks2j\" (UniqueName: \"kubernetes.io/projected/264d19bb-68ab-4f61-8468-14a67d87a5dc-kube-api-access-gks2j\") pod \"collect-profiles-29320545-8fvcx\" (UID: \"264d19bb-68ab-4f61-8468-14a67d87a5dc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320545-8fvcx" Sep 30 11:45:00 
crc kubenswrapper[4730]: I0930 11:45:00.349221 4730 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/264d19bb-68ab-4f61-8468-14a67d87a5dc-config-volume\") pod \"collect-profiles-29320545-8fvcx\" (UID: \"264d19bb-68ab-4f61-8468-14a67d87a5dc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320545-8fvcx" Sep 30 11:45:00 crc kubenswrapper[4730]: I0930 11:45:00.350017 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/264d19bb-68ab-4f61-8468-14a67d87a5dc-config-volume\") pod \"collect-profiles-29320545-8fvcx\" (UID: \"264d19bb-68ab-4f61-8468-14a67d87a5dc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320545-8fvcx" Sep 30 11:45:00 crc kubenswrapper[4730]: I0930 11:45:00.363627 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/264d19bb-68ab-4f61-8468-14a67d87a5dc-secret-volume\") pod \"collect-profiles-29320545-8fvcx\" (UID: \"264d19bb-68ab-4f61-8468-14a67d87a5dc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320545-8fvcx" Sep 30 11:45:00 crc kubenswrapper[4730]: I0930 11:45:00.373253 4730 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gks2j\" (UniqueName: \"kubernetes.io/projected/264d19bb-68ab-4f61-8468-14a67d87a5dc-kube-api-access-gks2j\") pod \"collect-profiles-29320545-8fvcx\" (UID: \"264d19bb-68ab-4f61-8468-14a67d87a5dc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29320545-8fvcx" Sep 30 11:45:00 crc kubenswrapper[4730]: I0930 11:45:00.493270 4730 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29320545-8fvcx" Sep 30 11:45:00 crc kubenswrapper[4730]: I0930 11:45:00.953497 4730 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29320545-8fvcx"] Sep 30 11:45:01 crc kubenswrapper[4730]: I0930 11:45:01.266359 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29320545-8fvcx" event={"ID":"264d19bb-68ab-4f61-8468-14a67d87a5dc","Type":"ContainerStarted","Data":"240d691d651eb754d91439a2c4e8ad90d7e83918697c58fd486033180da5c52e"} Sep 30 11:45:01 crc kubenswrapper[4730]: I0930 11:45:01.266721 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29320545-8fvcx" event={"ID":"264d19bb-68ab-4f61-8468-14a67d87a5dc","Type":"ContainerStarted","Data":"fcc5470ef39d18cf04509251a9172da6d934a0c2dfe2f1d9fb6e98bcc682d2c0"} Sep 30 11:45:01 crc kubenswrapper[4730]: I0930 11:45:01.289361 4730 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29320545-8fvcx" podStartSLOduration=1.289341016 podStartE2EDuration="1.289341016s" podCreationTimestamp="2025-09-30 11:45:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 11:45:01.283746588 +0000 UTC m=+6945.617006591" watchObservedRunningTime="2025-09-30 11:45:01.289341016 +0000 UTC m=+6945.622601009" Sep 30 11:45:02 crc kubenswrapper[4730]: I0930 11:45:02.280015 4730 generic.go:334] "Generic (PLEG): container finished" podID="264d19bb-68ab-4f61-8468-14a67d87a5dc" 
containerID="240d691d651eb754d91439a2c4e8ad90d7e83918697c58fd486033180da5c52e" exitCode=0 Sep 30 11:45:02 crc kubenswrapper[4730]: I0930 11:45:02.280239 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29320545-8fvcx" event={"ID":"264d19bb-68ab-4f61-8468-14a67d87a5dc","Type":"ContainerDied","Data":"240d691d651eb754d91439a2c4e8ad90d7e83918697c58fd486033180da5c52e"} Sep 30 11:45:02 crc kubenswrapper[4730]: I0930 11:45:02.339807 4730 patch_prober.go:28] interesting pod/machine-config-daemon-d4zf9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 11:45:02 crc kubenswrapper[4730]: I0930 11:45:02.339859 4730 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 11:45:03 crc kubenswrapper[4730]: I0930 11:45:03.723141 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29320545-8fvcx" Sep 30 11:45:03 crc kubenswrapper[4730]: I0930 11:45:03.827214 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/264d19bb-68ab-4f61-8468-14a67d87a5dc-config-volume\") pod \"264d19bb-68ab-4f61-8468-14a67d87a5dc\" (UID: \"264d19bb-68ab-4f61-8468-14a67d87a5dc\") " Sep 30 11:45:03 crc kubenswrapper[4730]: I0930 11:45:03.827297 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/264d19bb-68ab-4f61-8468-14a67d87a5dc-secret-volume\") pod \"264d19bb-68ab-4f61-8468-14a67d87a5dc\" (UID: \"264d19bb-68ab-4f61-8468-14a67d87a5dc\") " Sep 30 11:45:03 crc kubenswrapper[4730]: I0930 11:45:03.827425 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gks2j\" (UniqueName: \"kubernetes.io/projected/264d19bb-68ab-4f61-8468-14a67d87a5dc-kube-api-access-gks2j\") pod \"264d19bb-68ab-4f61-8468-14a67d87a5dc\" (UID: \"264d19bb-68ab-4f61-8468-14a67d87a5dc\") " Sep 30 11:45:03 crc kubenswrapper[4730]: I0930 11:45:03.827989 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/264d19bb-68ab-4f61-8468-14a67d87a5dc-config-volume" (OuterVolumeSpecName: "config-volume") pod "264d19bb-68ab-4f61-8468-14a67d87a5dc" (UID: "264d19bb-68ab-4f61-8468-14a67d87a5dc"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 11:45:03 crc kubenswrapper[4730]: I0930 11:45:03.833125 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/264d19bb-68ab-4f61-8468-14a67d87a5dc-kube-api-access-gks2j" (OuterVolumeSpecName: "kube-api-access-gks2j") pod "264d19bb-68ab-4f61-8468-14a67d87a5dc" (UID: "264d19bb-68ab-4f61-8468-14a67d87a5dc"). InnerVolumeSpecName "kube-api-access-gks2j". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 11:45:03 crc kubenswrapper[4730]: I0930 11:45:03.833291 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/264d19bb-68ab-4f61-8468-14a67d87a5dc-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "264d19bb-68ab-4f61-8468-14a67d87a5dc" (UID: "264d19bb-68ab-4f61-8468-14a67d87a5dc"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 11:45:03 crc kubenswrapper[4730]: I0930 11:45:03.929709 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gks2j\" (UniqueName: \"kubernetes.io/projected/264d19bb-68ab-4f61-8468-14a67d87a5dc-kube-api-access-gks2j\") on node \"crc\" DevicePath \"\"" Sep 30 11:45:03 crc kubenswrapper[4730]: I0930 11:45:03.929753 4730 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/264d19bb-68ab-4f61-8468-14a67d87a5dc-config-volume\") on node \"crc\" DevicePath \"\"" Sep 30 11:45:03 crc kubenswrapper[4730]: I0930 11:45:03.929766 4730 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/264d19bb-68ab-4f61-8468-14a67d87a5dc-secret-volume\") on node \"crc\" DevicePath \"\"" Sep 30 11:45:04 crc kubenswrapper[4730]: I0930 11:45:04.313624 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29320545-8fvcx" event={"ID":"264d19bb-68ab-4f61-8468-14a67d87a5dc","Type":"ContainerDied","Data":"fcc5470ef39d18cf04509251a9172da6d934a0c2dfe2f1d9fb6e98bcc682d2c0"} Sep 30 11:45:04 crc kubenswrapper[4730]: I0930 11:45:04.313674 4730 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fcc5470ef39d18cf04509251a9172da6d934a0c2dfe2f1d9fb6e98bcc682d2c0" Sep 30 11:45:04 crc kubenswrapper[4730]: I0930 11:45:04.313774 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29320545-8fvcx" Sep 30 11:45:04 crc kubenswrapper[4730]: I0930 11:45:04.373603 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29320500-d28lx"] Sep 30 11:45:04 crc kubenswrapper[4730]: I0930 11:45:04.402534 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29320500-d28lx"] Sep 30 11:45:06 crc kubenswrapper[4730]: I0930 11:45:06.391424 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6868bf94-9e24-4cab-a99b-b180dcd281a5" path="/var/lib/kubelet/pods/6868bf94-9e24-4cab-a99b-b180dcd281a5/volumes" Sep 30 11:45:18 crc kubenswrapper[4730]: E0930 11:45:18.809465 4730 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.104:59316->38.102.83.104:41011: write tcp 38.102.83.104:59316->38.102.83.104:41011: write: broken pipe Sep 30 11:45:32 crc kubenswrapper[4730]: I0930 11:45:32.336843 4730 patch_prober.go:28] interesting pod/machine-config-daemon-d4zf9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 11:45:32 crc kubenswrapper[4730]: I0930 11:45:32.337372 4730 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 11:45:33 crc kubenswrapper[4730]: I0930 11:45:33.870602 4730 scope.go:117] "RemoveContainer" containerID="880a617c794f5584919e1df06085d189a09a6bef771d194957c79de0c6e22ca7" Sep 30 11:46:02 crc kubenswrapper[4730]: I0930 11:46:02.336693 4730 patch_prober.go:28] interesting pod/machine-config-daemon-d4zf9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 11:46:02 crc kubenswrapper[4730]: I0930 11:46:02.337067 4730 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 11:46:02 crc kubenswrapper[4730]: I0930 11:46:02.337110 4730 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" Sep 30 11:46:02 crc kubenswrapper[4730]: I0930 11:46:02.337859 4730 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"c20b9afa81423828ca193928fc6b5614b2ac2701cff18acc7533a75680cd948d"} pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 11:46:02 crc kubenswrapper[4730]: I0930 11:46:02.337913 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" 
podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerName="machine-config-daemon" containerID="cri-o://c20b9afa81423828ca193928fc6b5614b2ac2701cff18acc7533a75680cd948d" gracePeriod=600 Sep 30 11:46:02 crc kubenswrapper[4730]: E0930 11:46:02.468278 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 11:46:03 crc kubenswrapper[4730]: I0930 11:46:03.027116 4730 generic.go:334] "Generic (PLEG): container finished" podID="95bd4436-8399-478d-9552-c9ba5ae8f327" containerID="c20b9afa81423828ca193928fc6b5614b2ac2701cff18acc7533a75680cd948d" exitCode=0 Sep 30 11:46:03 crc kubenswrapper[4730]: I0930 11:46:03.027169 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" event={"ID":"95bd4436-8399-478d-9552-c9ba5ae8f327","Type":"ContainerDied","Data":"c20b9afa81423828ca193928fc6b5614b2ac2701cff18acc7533a75680cd948d"} Sep 30 11:46:03 crc kubenswrapper[4730]: I0930 11:46:03.027451 4730 scope.go:117] "RemoveContainer" containerID="f8da3276b1fecb6794ec8f0d4d10c4ec11116640fed8ffd3e1e680a043c05017" Sep 30 11:46:03 crc kubenswrapper[4730]: I0930 11:46:03.028160 4730 scope.go:117] "RemoveContainer" containerID="c20b9afa81423828ca193928fc6b5614b2ac2701cff18acc7533a75680cd948d" Sep 30 11:46:03 crc kubenswrapper[4730]: E0930 11:46:03.028596 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 11:46:13 crc kubenswrapper[4730]: I0930 11:46:13.381436 4730 scope.go:117] "RemoveContainer" containerID="c20b9afa81423828ca193928fc6b5614b2ac2701cff18acc7533a75680cd948d" Sep 30 11:46:13 crc kubenswrapper[4730]: E0930 11:46:13.382144 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 11:46:28 crc kubenswrapper[4730]: I0930 11:46:28.381743 4730 scope.go:117] "RemoveContainer" containerID="c20b9afa81423828ca193928fc6b5614b2ac2701cff18acc7533a75680cd948d" Sep 30 11:46:28 crc kubenswrapper[4730]: E0930 11:46:28.382752 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 11:46:41 crc kubenswrapper[4730]: I0930 11:46:41.382203 4730 scope.go:117] 
"RemoveContainer" containerID="c20b9afa81423828ca193928fc6b5614b2ac2701cff18acc7533a75680cd948d" Sep 30 11:46:41 crc kubenswrapper[4730]: E0930 11:46:41.384174 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 11:46:55 crc kubenswrapper[4730]: I0930 11:46:55.381919 4730 scope.go:117] "RemoveContainer" containerID="c20b9afa81423828ca193928fc6b5614b2ac2701cff18acc7533a75680cd948d" Sep 30 11:46:55 crc kubenswrapper[4730]: E0930 11:46:55.384585 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 11:47:11 crc kubenswrapper[4730]: I0930 11:47:11.381945 4730 scope.go:117] "RemoveContainer" containerID="c20b9afa81423828ca193928fc6b5614b2ac2701cff18acc7533a75680cd948d" Sep 30 11:47:11 crc kubenswrapper[4730]: E0930 11:47:11.382719 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 11:47:26 crc kubenswrapper[4730]: I0930 11:47:26.394270 4730 scope.go:117] "RemoveContainer" containerID="c20b9afa81423828ca193928fc6b5614b2ac2701cff18acc7533a75680cd948d" Sep 30 11:47:26 crc kubenswrapper[4730]: E0930 11:47:26.395434 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 11:47:27 crc kubenswrapper[4730]: I0930 11:47:27.082917 4730 generic.go:334] "Generic (PLEG): container finished" podID="9e4da034-b6ff-407c-a38b-3e9bb41095e5" containerID="4aca616e7c530b9bda27d52cb344073ffd89ab92711c37e2d62e32545e969589" exitCode=0 Sep 30 11:47:27 crc kubenswrapper[4730]: I0930 11:47:27.082992 4730 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-jbr8l/must-gather-k8rvv" event={"ID":"9e4da034-b6ff-407c-a38b-3e9bb41095e5","Type":"ContainerDied","Data":"4aca616e7c530b9bda27d52cb344073ffd89ab92711c37e2d62e32545e969589"} Sep 30 11:47:27 crc kubenswrapper[4730]: I0930 11:47:27.084396 4730 scope.go:117] "RemoveContainer" containerID="4aca616e7c530b9bda27d52cb344073ffd89ab92711c37e2d62e32545e969589" Sep 30 11:47:27 crc kubenswrapper[4730]: I0930 11:47:27.333254 4730 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-must-gather-jbr8l_must-gather-k8rvv_9e4da034-b6ff-407c-a38b-3e9bb41095e5/gather/0.log" Sep 30 11:47:38 crc kubenswrapper[4730]: I0930 11:47:38.382668 4730 scope.go:117] "RemoveContainer" containerID="c20b9afa81423828ca193928fc6b5614b2ac2701cff18acc7533a75680cd948d" Sep 30 11:47:38 crc kubenswrapper[4730]: E0930 11:47:38.384177 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 11:47:42 crc kubenswrapper[4730]: I0930 11:47:42.502433 4730 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-jbr8l/must-gather-k8rvv"] Sep 30 11:47:42 crc kubenswrapper[4730]: I0930 11:47:42.503328 4730 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-jbr8l/must-gather-k8rvv" podUID="9e4da034-b6ff-407c-a38b-3e9bb41095e5" containerName="copy" containerID="cri-o://0a757118e4a12c408d06a5ed7cd1475484e0a20f1f1e944a9d1c20d8d9b7e4c4" gracePeriod=2 Sep 30 11:47:42 crc kubenswrapper[4730]: I0930 11:47:42.514287 4730 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-jbr8l/must-gather-k8rvv"] Sep 30 11:47:42 crc kubenswrapper[4730]: I0930 11:47:42.953568 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-jbr8l_must-gather-k8rvv_9e4da034-b6ff-407c-a38b-3e9bb41095e5/copy/0.log" Sep 30 11:47:42 crc kubenswrapper[4730]: I0930 11:47:42.954341 4730 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-jbr8l/must-gather-k8rvv" Sep 30 11:47:43 crc kubenswrapper[4730]: I0930 11:47:43.037689 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pbpld\" (UniqueName: \"kubernetes.io/projected/9e4da034-b6ff-407c-a38b-3e9bb41095e5-kube-api-access-pbpld\") pod \"9e4da034-b6ff-407c-a38b-3e9bb41095e5\" (UID: \"9e4da034-b6ff-407c-a38b-3e9bb41095e5\") " Sep 30 11:47:43 crc kubenswrapper[4730]: I0930 11:47:43.038147 4730 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/9e4da034-b6ff-407c-a38b-3e9bb41095e5-must-gather-output\") pod \"9e4da034-b6ff-407c-a38b-3e9bb41095e5\" (UID: \"9e4da034-b6ff-407c-a38b-3e9bb41095e5\") " Sep 30 11:47:43 crc kubenswrapper[4730]: I0930 11:47:43.043449 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9e4da034-b6ff-407c-a38b-3e9bb41095e5-kube-api-access-pbpld" (OuterVolumeSpecName: "kube-api-access-pbpld") pod "9e4da034-b6ff-407c-a38b-3e9bb41095e5" (UID: "9e4da034-b6ff-407c-a38b-3e9bb41095e5"). InnerVolumeSpecName "kube-api-access-pbpld". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 11:47:43 crc kubenswrapper[4730]: I0930 11:47:43.140540 4730 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pbpld\" (UniqueName: \"kubernetes.io/projected/9e4da034-b6ff-407c-a38b-3e9bb41095e5-kube-api-access-pbpld\") on node \"crc\" DevicePath \"\"" Sep 30 11:47:43 crc kubenswrapper[4730]: I0930 11:47:43.237400 4730 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9e4da034-b6ff-407c-a38b-3e9bb41095e5-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "9e4da034-b6ff-407c-a38b-3e9bb41095e5" (UID: "9e4da034-b6ff-407c-a38b-3e9bb41095e5"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 11:47:43 crc kubenswrapper[4730]: I0930 11:47:43.242841 4730 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/9e4da034-b6ff-407c-a38b-3e9bb41095e5-must-gather-output\") on node \"crc\" DevicePath \"\"" Sep 30 11:47:43 crc kubenswrapper[4730]: I0930 11:47:43.290773 4730 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-jbr8l_must-gather-k8rvv_9e4da034-b6ff-407c-a38b-3e9bb41095e5/copy/0.log" Sep 30 11:47:43 crc kubenswrapper[4730]: I0930 11:47:43.291284 4730 generic.go:334] "Generic (PLEG): container finished" podID="9e4da034-b6ff-407c-a38b-3e9bb41095e5" containerID="0a757118e4a12c408d06a5ed7cd1475484e0a20f1f1e944a9d1c20d8d9b7e4c4" exitCode=143 Sep 30 11:47:43 crc kubenswrapper[4730]: I0930 11:47:43.291366 4730 scope.go:117] "RemoveContainer" containerID="0a757118e4a12c408d06a5ed7cd1475484e0a20f1f1e944a9d1c20d8d9b7e4c4" Sep 30 11:47:43 crc kubenswrapper[4730]: I0930 11:47:43.291377 4730 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-jbr8l/must-gather-k8rvv" Sep 30 11:47:43 crc kubenswrapper[4730]: I0930 11:47:43.336794 4730 scope.go:117] "RemoveContainer" containerID="4aca616e7c530b9bda27d52cb344073ffd89ab92711c37e2d62e32545e969589" Sep 30 11:47:43 crc kubenswrapper[4730]: I0930 11:47:43.415175 4730 scope.go:117] "RemoveContainer" containerID="0a757118e4a12c408d06a5ed7cd1475484e0a20f1f1e944a9d1c20d8d9b7e4c4" Sep 30 11:47:43 crc kubenswrapper[4730]: E0930 11:47:43.415996 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0a757118e4a12c408d06a5ed7cd1475484e0a20f1f1e944a9d1c20d8d9b7e4c4\": container with ID starting with 0a757118e4a12c408d06a5ed7cd1475484e0a20f1f1e944a9d1c20d8d9b7e4c4 not found: ID does not exist" containerID="0a757118e4a12c408d06a5ed7cd1475484e0a20f1f1e944a9d1c20d8d9b7e4c4" Sep 30 11:47:43 crc kubenswrapper[4730]: I0930 11:47:43.416039 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0a757118e4a12c408d06a5ed7cd1475484e0a20f1f1e944a9d1c20d8d9b7e4c4"} err="failed to get container status \"0a757118e4a12c408d06a5ed7cd1475484e0a20f1f1e944a9d1c20d8d9b7e4c4\": rpc error: code = NotFound desc = could not find container \"0a757118e4a12c408d06a5ed7cd1475484e0a20f1f1e944a9d1c20d8d9b7e4c4\": container with ID starting with 0a757118e4a12c408d06a5ed7cd1475484e0a20f1f1e944a9d1c20d8d9b7e4c4 not found: ID does not exist" Sep 30 11:47:43 crc kubenswrapper[4730]: I0930 11:47:43.416071 4730 scope.go:117] "RemoveContainer" containerID="4aca616e7c530b9bda27d52cb344073ffd89ab92711c37e2d62e32545e969589" Sep 30 11:47:43 crc kubenswrapper[4730]: E0930 11:47:43.416532 4730 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4aca616e7c530b9bda27d52cb344073ffd89ab92711c37e2d62e32545e969589\": container with ID starting with 4aca616e7c530b9bda27d52cb344073ffd89ab92711c37e2d62e32545e969589 not found: ID does not exist" containerID="4aca616e7c530b9bda27d52cb344073ffd89ab92711c37e2d62e32545e969589" Sep 30 11:47:43 crc kubenswrapper[4730]: I0930 11:47:43.416554 4730 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4aca616e7c530b9bda27d52cb344073ffd89ab92711c37e2d62e32545e969589"} err="failed to get container status \"4aca616e7c530b9bda27d52cb344073ffd89ab92711c37e2d62e32545e969589\": rpc error: code = NotFound desc = could not find container \"4aca616e7c530b9bda27d52cb344073ffd89ab92711c37e2d62e32545e969589\": container with ID starting with 4aca616e7c530b9bda27d52cb344073ffd89ab92711c37e2d62e32545e969589 not found: ID does not exist" Sep 30 11:47:44 crc kubenswrapper[4730]: I0930 11:47:44.397055 4730 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9e4da034-b6ff-407c-a38b-3e9bb41095e5" path="/var/lib/kubelet/pods/9e4da034-b6ff-407c-a38b-3e9bb41095e5/volumes" Sep 30 11:47:52 crc kubenswrapper[4730]: I0930 11:47:52.381649 4730 scope.go:117] "RemoveContainer" containerID="c20b9afa81423828ca193928fc6b5614b2ac2701cff18acc7533a75680cd948d" Sep 30 11:47:52 crc kubenswrapper[4730]: E0930 11:47:52.382510 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 11:48:04 crc kubenswrapper[4730]: I0930 11:48:04.381854 4730 scope.go:117] "RemoveContainer" containerID="c20b9afa81423828ca193928fc6b5614b2ac2701cff18acc7533a75680cd948d" Sep 30 11:48:04 crc kubenswrapper[4730]: E0930 11:48:04.383033 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 11:48:16 crc kubenswrapper[4730]: I0930 11:48:16.398154 4730 scope.go:117] "RemoveContainer" containerID="c20b9afa81423828ca193928fc6b5614b2ac2701cff18acc7533a75680cd948d" Sep 30 11:48:16 crc kubenswrapper[4730]: E0930 11:48:16.399534 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 11:48:30 crc kubenswrapper[4730]: I0930 11:48:30.381249 4730 scope.go:117] "RemoveContainer" containerID="c20b9afa81423828ca193928fc6b5614b2ac2701cff18acc7533a75680cd948d" Sep 30 11:48:30 crc kubenswrapper[4730]: E0930 11:48:30.384991 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 11:48:44 crc kubenswrapper[4730]: I0930 11:48:44.381503 4730 scope.go:117] "RemoveContainer" containerID="c20b9afa81423828ca193928fc6b5614b2ac2701cff18acc7533a75680cd948d" Sep 30 11:48:44 crc kubenswrapper[4730]: E0930 11:48:44.382434 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" Sep 30 11:48:58 crc kubenswrapper[4730]: I0930 11:48:58.381231 4730 scope.go:117] "RemoveContainer" containerID="c20b9afa81423828ca193928fc6b5614b2ac2701cff18acc7533a75680cd948d" Sep 30 11:48:58 crc kubenswrapper[4730]: E0930 11:48:58.381992 4730 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-d4zf9_openshift-machine-config-operator(95bd4436-8399-478d-9552-c9ba5ae8f327)\"" pod="openshift-machine-config-operator/machine-config-daemon-d4zf9" podUID="95bd4436-8399-478d-9552-c9ba5ae8f327" 